ngram
listlengths
0
67.8k
[ "results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3,", "tempfile from server import db def use_temp_db(filename): def _use_temp_db(fn): def wrapper(obj): with tempfile.TemporaryDirectory()", "= db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 3), (2, 4), 0)", "dirname: fn(obj, os.path.join(dirname, filename)) return wrapper return _use_temp_db class TestMatchResultDBController(TestCase): def test_get_from_blank(self): with", "logger as lg: lg.add((0, 1), ('foo', 'bar')) results = logger.get(ordered=True) self.assertEqual(results, [ {'id':", "logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar')) with logger", "with logger as lg: lg.add((0, 1), ('foo', 'bar')) results = logger.get(ordered=True) self.assertEqual(results, [", "'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) class TestItemLabelDBController(TestCase): def test_get_from_blank(self):", "3), (2, 4), 0) lg.add((2, 3), (5, 7), (6, 8), 2) with logger", "0, (1400, 1550), (1600, 1450)) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id':", "logger.get() self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db')", "3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_delete(self, filename): logger = db.MatchResultDBController(filename) with logger", "'loser_rate': 1490.0, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 2, 'loser': 3,", "1), (1, 2), (2, 3), 0, (1400, 1550), (1600, 1450)) lg.add((2, 3), (5,", "3, 0, 1550, 1450) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1,", "(2, 3), 0, (1400, 1550), (1600, 1450)) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1],", "'trigger_id': 0, }) 
@use_temp_db('test.db') def test_add_list2(self, filename): logger = db.MatchResultDBController(filename) with logger as", "with logger as lg: lg.add(0, 1, 2, 0, 1400.0, 1600.0) lg.add(1, 2, 3,", "= db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2, 3), 0,", "self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.MatchResultDBController(filename) with logger as lg:", "as lg: lg.add((0, 1), (1, 2), (2, 3), (0, 0)) results = logger.get()", "6), (7, 8), 2, (1300, 1700), (1510, 1490)) with logger as lg: deleted", "'trigger_id': 0, }) class TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger =", "logger.get() self.assertEqual(results, [ {'id': 1, 'label': 'bar'}, ]) self.assertEqual(deleted, [ {'id': 0, 'label':", "(1, 2), (2, 3), (0, 0)) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], {", "from server import db def use_temp_db(filename): def _use_temp_db(fn): def wrapper(obj): with tempfile.TemporaryDirectory() as", "1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_delete(self, filename): logger = db.ItemLabelDBController(filename) with logger", "with logger as lg: lg.add((0, 1), ('foo', 'bar')) with logger as lg: deleted", "with logger as lg: lg.add((0, 1), (1, 2), (2, 3), (0, 0)) results", "@use_temp_db('test.db') def test_add_one(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add(0, 1,", "0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser':", "'bar'}, ]) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg:", "test_add_list(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2),", "test_add_one(self, filename): logger = 
db.ItemLabelDBController(filename) with logger as lg: lg.add(0, 'foo') lg.add(1, 'bar')", "logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0,", "1550), (1600, 1450)) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner':", "1), (1, 3), (2, 4), 0) lg.add((2, 3), (5, 7), (6, 8), 2)", "lg.add(1, 'bar') results = logger.get() self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1,", "fn(obj, os.path.join(dirname, filename)) return wrapper return _use_temp_db class TestMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile()", "logger as lg: lg.add(0, 1, 2, 0) lg.add(1, 2, 3, 0) results =", "test_add_list2(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2),", "def test_delete(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo',", "= db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar')) with logger as", "results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.MatchResultDBController(filename) with", "as f: logger = db.RatedMatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self,", "2, 'loser': 3, 'trigger_id': 0, }) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def test_add_list(self, filename): logger", "lg.delete(0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 7, 'loser':", "self.assertEqual(deleted[1], { 'id': 1, 'winner': 3, 'loser': 4, 'trigger_id': 0, }) class TestRatedMatchResultDBController(TestCase):", "'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) 
@use_temp_db('test.db') def", "}) @use_temp_db('test.db') def test_add_delete(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0,", "'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_delete(self, filename): logger =", "with tempfile.TemporaryDirectory() as dirname: fn(obj, os.path.join(dirname, filename)) return wrapper return _use_temp_db class TestMatchResultDBController(TestCase):", "results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3,", "@use_temp_db('test.db') def test_add_one(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add(0, 'foo')", "utf-8 -*- import os from unittest import TestCase import tempfile from server import", "(1, 3), (2, 4), 0) lg.add((2, 3), (5, 7), (6, 8), 2) with", "0) lg.add((2, 3), (5, 7), (6, 8), 2) with logger as lg: deleted", "as lg: deleted = lg.delete('foo') results = logger.get() self.assertEqual(results, [ {'id': 1, 'label':", "logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2, 3),", "wrapper return _use_temp_db class TestMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger =", "1600.0) lg.add(1, 2, 3, 0, 1550, 1450) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1],", "results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.RatedMatchResultDBController(filename) with", "1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db')", "= logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.MatchResultDBController(filename) with logger", "filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 3), (2,", 
"= logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id':", "lg.add((0, 1), (1, 2), (2, 3), 0) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1],", "def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.RatedMatchResultDBController(f.name) results = logger.get() self.assertEqual(results,", "as f: logger = db.MatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self,", "0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.RatedMatchResultDBController(filename)", "{'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_add_list(self, filename):", "0, }) @use_temp_db('test.db') def test_delete(self, filename): logger = db.MatchResultDBController(filename) with logger as lg:", "[]) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add(0,", "TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.RatedMatchResultDBController(f.name) results = logger.get()", "results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.ItemLabelDBController(filename) with", "lg: lg.add(0, 1, 2, 0) lg.add(1, 2, 3, 0) results = logger.get() self.assertEqual(len(results),", "'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_add_list(self, filename): logger =", "2, (1300, 1700), (1510, 1490)) with logger as lg: deleted = lg.delete(0) results", "lg: lg.add((0, 1), (1, 2), (2, 3), 0, (1400, 1550), (1600, 1450)) results", "0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_add_list(self, filename): 
logger", "'bar'}, ]) @use_temp_db('test.db') def test_delete(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg:", "'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.ItemLabelDBController(filename)", "def wrapper(obj): with tempfile.TemporaryDirectory() as dirname: fn(obj, os.path.join(dirname, filename)) return wrapper return _use_temp_db", "2), (2, 3), 0, (1400, 1550), (1600, 1450)) results = logger.get(ordered=True) self.assertEqual(len(results), 2)", "'trigger_id': 0, }) @use_temp_db('test.db') def test_delete(self, filename): logger = db.MatchResultDBController(filename) with logger as", "def use_temp_db(filename): def _use_temp_db(fn): def wrapper(obj): with tempfile.TemporaryDirectory() as dirname: fn(obj, os.path.join(dirname, filename))", "= logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 6, 'loser': 8, 'trigger_id':", "1), ('foo', 'bar')) with logger as lg: deleted = lg.delete('foo') results = logger.get()", "7, 'loser': 8, 'trigger_id': 2, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner':", "2, 0, 1400.0, 1600.0) lg.add(1, 2, 3, 0, 1550, 1450) results = logger.get()", "'id': 3, 'winner': 6, 'loser': 8, 'trigger_id': 2, 'winner_rate': 1700.0, 'loser_rate': 1490.0, })", "2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0,", "3), (0, 0)) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner':", "'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_list(self, filename): logger =", "wrapper(obj): with tempfile.TemporaryDirectory() as dirname: fn(obj, os.path.join(dirname, filename)) return wrapper return _use_temp_db class", "lg.add(0, 1, 2, 0) lg.add(1, 2, 3, 0) results = logger.get() self.assertEqual(len(results), 2)", "(1400, 1550), 
(1600, 1450)) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1,", "3, 'winner': 7, 'loser': 8, 'trigger_id': 2, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id':", "[ {'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_delete(self,", "logger as lg: lg.add((0, 1), (1, 2), (2, 3), 0) results = logger.get(ordered=True)", "1450.0, }) class TestItemLabelDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.ItemLabelDBController(f.name)", "import TestCase import tempfile from server import db def use_temp_db(filename): def _use_temp_db(fn): def", "test_add_one(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 0,", "self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 3, 'loser': 4, 'trigger_id': 0, })", "3), (5, 6), (7, 8), 2, (1300, 1700), (1510, 1490)) with logger as", "(1, 2), (2, 3), 0, (1400, 1550), (1600, 1450)) results = logger.get(ordered=True) self.assertEqual(len(results),", "8), 2, (1300, 1700), (1510, 1490)) with logger as lg: deleted = lg.delete(0)", "(5, 7), (6, 8), 2) with logger as lg: deleted = lg.delete(0) results", "1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def test_add_list(self,", "1550), (1600, 1450)) lg.add((2, 3), (5, 6), (7, 8), 2, (1300, 1700), (1510,", "@use_temp_db('test.db') def test_delete(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1),", "def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.ItemLabelDBController(f.name) results = logger.get() self.assertEqual(results,", "logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 7, 'loser': 8, 'trigger_id': 2,", "test_get_from_blank(self): with 
tempfile.NamedTemporaryFile() as f: logger = db.RatedMatchResultDBController(f.name) results = logger.get() self.assertEqual(results, [])", "@use_temp_db('test.db') def test_add_list2(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1),", "lg.add(0, 1, 2, 0, 1400.0, 1600.0) lg.add(1, 2, 3, 0, 1550, 1450) results", "0, }) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.MatchResultDBController(filename) with logger", "'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as", "self.assertEqual(results[1], { 'id': 3, 'winner': 6, 'loser': 8, 'trigger_id': 2, 'winner_rate': 1700.0, 'loser_rate':", "self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def", "def test_add_list2(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1,", "1400.0, 1600.0) lg.add(1, 2, 3, 0, 1550, 1450) results = logger.get() self.assertEqual(len(results), 2)", "1), (1, 2), (2, 3), 0) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], {", "self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.MatchResultDBController(filename) with logger as lg:", "filename)) return wrapper return _use_temp_db class TestMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f:", "'winner': 2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_delete(self, filename): logger =", "def test_add_one(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2,", "1, 2, 0) lg.add(1, 2, 3, 0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1],", "lg: lg.add((0, 1), (1, 2), (2, 3), (0, 
0)) results = logger.get() self.assertEqual(len(results),", "1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_add_list2(self, filename): logger", "db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2, 3), 0) results", "1), ('foo', 'bar')) results = logger.get(ordered=True) self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id':", "}) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0,", "as lg: deleted = lg.delete(0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id':", "0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_delete(self, filename): logger", "1550, 1450) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2,", "return wrapper return _use_temp_db class TestMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger", "'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def", "self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate':", "'winner': 2, 'loser': 3, 'trigger_id': 0, }) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def test_add_list(self, filename):", "2) self.assertEqual(results[1], { 'id': 3, 'winner': 6, 'loser': 8, 'trigger_id': 2, 'winner_rate': 1700.0,", "3, 'loser': 4, 'trigger_id': 0, }) class TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as", "3), 0) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2,", "1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) class", "filename): logger = 
db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2,", "{'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_delete(self, filename): logger = db.ItemLabelDBController(filename) with", "as lg: lg.add(0, 1, 2, 0, 1400.0, 1600.0) lg.add(1, 2, 3, 0, 1550,", "@use_temp_db('test.db') def test_add_list(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1),", "filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add(0, 'foo') lg.add(1, 'bar') results", "tempfile.TemporaryDirectory() as dirname: fn(obj, os.path.join(dirname, filename)) return wrapper return _use_temp_db class TestMatchResultDBController(TestCase): def", "= db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2, 3), 0)", "with logger as lg: lg.add((0, 1), (1, 2), (2, 3), 0) results =", "0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_delete(self, filename): logger = db.RatedMatchResultDBController(filename)", "{'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.ItemLabelDBController(filename) with", "8), 2) with logger as lg: deleted = lg.delete(0) results = logger.get() self.assertEqual(len(results),", "_use_temp_db class TestMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.MatchResultDBController(f.name) results", "-*- coding: utf-8 -*- import os from unittest import TestCase import tempfile from", "logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 6, 'loser': 8, 'trigger_id': 2,", "'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) class TestItemLabelDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile()", "db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2, 3), 0, (1400,", "1450)) 
lg.add((2, 3), (5, 6), (7, 8), 2, (1300, 1700), (1510, 1490)) with", "= db.ItemLabelDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger =", "with tempfile.NamedTemporaryFile() as f: logger = db.RatedMatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db')", "{ 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_add_list2(self,", "logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 3), (2, 4),", "as lg: lg.add((0, 1), ('foo', 'bar')) with logger as lg: deleted = lg.delete('foo')", "self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 6, 'loser': 8, 'trigger_id': 2, 'winner_rate':", "2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db')", "lg: lg.add(0, 'foo') lg.add(1, 'bar') results = logger.get() self.assertEqual(results, [ {'id': 0, 'label':", "= db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2, 3), (0,", "test_add_one(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 0)", "lg.add((2, 3), (5, 7), (6, 8), 2) with logger as lg: deleted =", "class TestMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.MatchResultDBController(f.name) results =", "'loser': 8, 'trigger_id': 2, 'winner_rate': 1700.0, 'loser_rate': 1490.0, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], {", "'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_delete(self, filename): logger = db.RatedMatchResultDBController(filename) with", "tempfile.NamedTemporaryFile() as f: logger = db.ItemLabelDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def", "'id': 1, 'winner': 
2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_delete(self, filename):", "0, (1400, 1550), (1600, 1450)) lg.add((2, 3), (5, 6), (7, 8), 2, (1300,", "lg.add(1, 2, 3, 0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1,", "'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_list(self, filename):", "@use_temp_db('test.db') def test_add_one(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add(0, 1,", "self.assertEqual(deleted[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate':", "as dirname: fn(obj, os.path.join(dirname, filename)) return wrapper return _use_temp_db class TestMatchResultDBController(TestCase): def test_get_from_blank(self):", "1700.0, 'loser_rate': 1490.0, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 2, 'loser':", "(1400, 1550), (1600, 1450)) lg.add((2, 3), (5, 6), (7, 8), 2, (1300, 1700),", "1, 'winner': 3, 'loser': 4, 'trigger_id': 0, }) class TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with", "logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.MatchResultDBController(filename) with logger as", "-*- import os from unittest import TestCase import tempfile from server import db", "lg.add((2, 3), (5, 6), (7, 8), 2, (1300, 1700), (1510, 1490)) with logger", "deleted = lg.delete('foo') results = logger.get() self.assertEqual(results, [ {'id': 1, 'label': 'bar'}, ])", "(1600, 1450)) lg.add((2, 3), (5, 6), (7, 8), 2, (1300, 1700), (1510, 1490))", "f: logger = db.MatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename):", "2, 0) lg.add(1, 2, 3, 0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], 
{", "'bar')) with logger as lg: deleted = lg.delete('foo') results = logger.get() self.assertEqual(results, [", "}) class TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.RatedMatchResultDBController(f.name) results", "= logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id':", "db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 3), (2, 4), 0) lg.add((2,", "self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate':", "0) lg.add(1, 2, 3, 0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id':", "}) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.MatchResultDBController(filename) with logger as", "(6, 8), 2) with logger as lg: deleted = lg.delete(0) results = logger.get()", "0, }) class TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.RatedMatchResultDBController(f.name)", "= db.MatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger =", "class TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.RatedMatchResultDBController(f.name) results =", "= db.MatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 0) lg.add(1, 2, 3,", "'trigger_id': 2, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 3, 'loser': 4,", "'foo') lg.add(1, 'bar') results = logger.get() self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id':", "'bar')) results = logger.get(ordered=True) self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1, 
'label':", "db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar')) with logger as lg:", "self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, })", "'loser': 4, 'trigger_id': 0, }) class TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f:", "db def use_temp_db(filename): def _use_temp_db(fn): def wrapper(obj): with tempfile.TemporaryDirectory() as dirname: fn(obj, os.path.join(dirname,", "}) @use_temp_db('test.db') def test_delete(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0,", "2, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 3, 'loser': 4, 'trigger_id':", "(2, 3), 0) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner':", "[ {'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_add_list(self,", "lg: deleted = lg.delete('foo') results = logger.get() self.assertEqual(results, [ {'id': 1, 'label': 'bar'},", "logger as lg: lg.add((0, 1), (1, 2), (2, 3), (0, 0)) results =", "3, 'winner': 6, 'loser': 8, 'trigger_id': 2, 'winner_rate': 1700.0, 'loser_rate': 1490.0, }) self.assertEqual(len(deleted),", "with logger as lg: deleted = lg.delete(0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1],", "db.ItemLabelDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.ItemLabelDBController(filename)", "'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_add_list2(self, filename):", "]) @use_temp_db('test.db') def test_delete(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0,", "4, 'trigger_id': 0, }) class 
TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger", "0, 1550, 1450) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner':", "with logger as lg: lg.add(0, 'foo') lg.add(1, 'bar') results = logger.get() self.assertEqual(results, [", "as lg: lg.add((0, 1), ('foo', 'bar')) results = logger.get(ordered=True) self.assertEqual(results, [ {'id': 0,", "{ 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0,", "1, 2, 0, 1400.0, 1600.0) lg.add(1, 2, 3, 0, 1550, 1450) results =", "= logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.RatedMatchResultDBController(filename) with logger", "db.ItemLabelDBController(filename) with logger as lg: lg.add(0, 'foo') lg.add(1, 'bar') results = logger.get() self.assertEqual(results,", "'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_add_list2(self, filename): logger = db.MatchResultDBController(filename) with", "2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_delete(self, filename): logger = db.MatchResultDBController(filename)", "3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_list(self, filename): logger", "{ 'id': 3, 'winner': 6, 'loser': 8, 'trigger_id': 2, 'winner_rate': 1700.0, 'loser_rate': 1490.0,", "self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg:", "tempfile.NamedTemporaryFile() as f: logger = db.RatedMatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def", "return _use_temp_db class TestMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.MatchResultDBController(f.name)", "(7, 8), 2, 
(1300, 1700), (1510, 1490)) with logger as lg: deleted =", "lg.delete(0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 6, 'loser':", "results = logger.get() self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'},", "lg: lg.add((0, 1), (1, 2), (2, 3), 0) results = logger.get(ordered=True) self.assertEqual(len(results), 2)", "'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) class TestItemLabelDBController(TestCase):", "def test_add_list(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo',", "test_delete(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 3),", "2) self.assertEqual(results[1], { 'id': 3, 'winner': 7, 'loser': 8, 'trigger_id': 2, }) self.assertEqual(len(deleted),", "test_add_delete(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2),", "logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.ItemLabelDBController(filename) with logger as", "os from unittest import TestCase import tempfile from server import db def use_temp_db(filename):", "1), (1, 2), (2, 3), (0, 0)) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1],", "(2, 3), (0, 0)) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1,", "filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar')) with", "TestMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.MatchResultDBController(f.name) results = logger.get()", "= logger.get() self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ])", "'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def 
test_delete(self, filename): logger = db.MatchResultDBController(filename) with", "lg: lg.add(0, 1, 2, 0, 1400.0, 1600.0) lg.add(1, 2, 3, 0, 1550, 1450)", "= logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.ItemLabelDBController(filename) with logger", "[]) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add(0,", "class TestItemLabelDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.ItemLabelDBController(f.name) results =", "= lg.delete('foo') results = logger.get() self.assertEqual(results, [ {'id': 1, 'label': 'bar'}, ]) self.assertEqual(deleted,", "logger = db.ItemLabelDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger", "self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def", "logger as lg: lg.add(0, 1, 2, 0, 1400.0, 1600.0) lg.add(1, 2, 3, 0,", "server import db def use_temp_db(filename): def _use_temp_db(fn): def wrapper(obj): with tempfile.TemporaryDirectory() as dirname:", "8, 'trigger_id': 2, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 3, 'loser':", "os.path.join(dirname, filename)) return wrapper return _use_temp_db class TestMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as", "= logger.get() self.assertEqual(results, [ {'id': 1, 'label': 'bar'}, ]) self.assertEqual(deleted, [ {'id': 0,", "'trigger_id': 2, 'winner_rate': 1700.0, 'loser_rate': 1490.0, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1,", "'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.RatedMatchResultDBController(filename) with", "0) results = 
logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser':", "as lg: lg.add(0, 1, 2, 0) lg.add(1, 2, 3, 0) results = logger.get()", "'label': 'bar'}, ]) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.ItemLabelDBController(filename) with logger as", "test_delete(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar'))", "import db def use_temp_db(filename): def _use_temp_db(fn): def wrapper(obj): with tempfile.TemporaryDirectory() as dirname: fn(obj,", "2), (2, 3), (0, 0)) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id':", "as f: logger = db.ItemLabelDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self,", "(1600, 1450)) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2,", "= logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 7, 'loser': 8, 'trigger_id':", "1700), (1510, 1490)) with logger as lg: deleted = lg.delete(0) results = logger.get()", "TestCase import tempfile from server import db def use_temp_db(filename): def _use_temp_db(fn): def wrapper(obj):", "_use_temp_db(fn): def wrapper(obj): with tempfile.TemporaryDirectory() as dirname: fn(obj, os.path.join(dirname, filename)) return wrapper return", "def test_add_list(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1,", "test_add_list(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar'))", "= db.RatedMatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 0, 1400.0, 1600.0) lg.add(1,", "(1300, 1700), (1510, 1490)) with logger as lg: deleted = lg.delete(0) results =", "0, }) @use_temp_db('test.db') def test_add_list2(self, 
filename): logger = db.MatchResultDBController(filename) with logger as lg:", "coding: utf-8 -*- import os from unittest import TestCase import tempfile from server", "with logger as lg: lg.add((0, 1), (1, 2), (2, 3), 0, (1400, 1550),", "logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as", "logger as lg: deleted = lg.delete(0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], {", "@use_temp_db('test.db') def test_add_list(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1),", "{'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_delete(self, filename):", "}) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 3, 'loser': 4, 'trigger_id': 0,", "(5, 6), (7, 8), 2, (1300, 1700), (1510, 1490)) with logger as lg:", "as lg: lg.add(0, 'foo') lg.add(1, 'bar') results = logger.get() self.assertEqual(results, [ {'id': 0,", "'id': 3, 'winner': 7, 'loser': 8, 'trigger_id': 2, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], {", "0, 1400.0, 1600.0) lg.add(1, 2, 3, 0, 1550, 1450) results = logger.get() self.assertEqual(len(results),", "lg: lg.add((0, 1), ('foo', 'bar')) results = logger.get(ordered=True) self.assertEqual(results, [ {'id': 0, 'label':", "'label': 'bar'}, ]) @use_temp_db('test.db') def test_delete(self, filename): logger = db.ItemLabelDBController(filename) with logger as", "def test_add_delete(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1,", "('foo', 'bar')) with logger as lg: deleted = lg.delete('foo') results = logger.get() self.assertEqual(results,", "results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 7, 'loser': 8,", "}) @use_temp_db('test.db') def test_add_list2(self, 
filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0,", "logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 0, 1400.0, 1600.0)", "1550.0, 'loser_rate': 1450.0, }) class TestItemLabelDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger", "'winner': 6, 'loser': 8, 'trigger_id': 2, 'winner_rate': 1700.0, 'loser_rate': 1490.0, }) self.assertEqual(len(deleted), 2)", "lg.add((0, 1), (1, 2), (2, 3), 0, (1400, 1550), (1600, 1450)) lg.add((2, 3),", "import os from unittest import TestCase import tempfile from server import db def", "lg.add(1, 2, 3, 0, 1550, 1450) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], {", "results = logger.get() self.assertEqual(results, [ {'id': 1, 'label': 'bar'}, ]) self.assertEqual(deleted, [ {'id':", "[ {'id': 1, 'label': 'bar'}, ]) self.assertEqual(deleted, [ {'id': 0, 'label': 'foo'} ])", "2), (2, 3), 0, (1400, 1550), (1600, 1450)) lg.add((2, 3), (5, 6), (7,", "1450.0, }) @use_temp_db('test.db') def test_add_delete(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg:", "lg.add((0, 1), (1, 2), (2, 3), 0, (1400, 1550), (1600, 1450)) results =", "1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_delete(self, filename): logger", "self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 7, 'loser': 8, 'trigger_id': 2, })", "tempfile.NamedTemporaryFile() as f: logger = db.MatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def", "2) with logger as lg: deleted = lg.delete(0) results = logger.get() self.assertEqual(len(results), 2)", "= lg.delete(0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 6,", "f: logger = db.ItemLabelDBController(f.name) results = logger.get() 
self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename):", "'loser': 8, 'trigger_id': 2, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 3,", "self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate':", "'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_delete(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as", "def test_delete(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1,", "'winner': 3, 'loser': 4, 'trigger_id': 0, }) class TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile()", "logger as lg: deleted = lg.delete('foo') results = logger.get() self.assertEqual(results, [ {'id': 1,", "(0, 0)) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2,", "2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0,", "2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_list(self,", "6, 'loser': 8, 'trigger_id': 2, 'winner_rate': 1700.0, 'loser_rate': 1490.0, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1],", "'trigger_id': 0, }) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.MatchResultDBController(filename) with", "lg: lg.add((0, 1), (1, 3), (2, 4), 0) lg.add((2, 3), (5, 7), (6,", "[]) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add(0,", "2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_delete(self,", "logger = db.MatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 
0) lg.add(1, 2,", "db.RatedMatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 0, 1400.0, 1600.0) lg.add(1, 2,", "1450)) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser':", "logger = db.ItemLabelDBController(filename) with logger as lg: lg.add(0, 'foo') lg.add(1, 'bar') results =", "logger as lg: lg.add(0, 'foo') lg.add(1, 'bar') results = logger.get() self.assertEqual(results, [ {'id':", "@use_temp_db('test.db') def test_add_list(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1),", "(1510, 1490)) with logger as lg: deleted = lg.delete(0) results = logger.get() self.assertEqual(len(results),", "2, 3, 0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner':", "logger = db.MatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger", "'winner': 7, 'loser': 8, 'trigger_id': 2, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1,", "1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.RatedMatchResultDBController(filename) with logger", "db.MatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.MatchResultDBController(filename)", "with tempfile.NamedTemporaryFile() as f: logger = db.ItemLabelDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db')", "0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) class TestItemLabelDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as", "8, 'trigger_id': 2, 'winner_rate': 1700.0, 'loser_rate': 1490.0, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id':", "lg.add((0, 1), (1, 
2), (2, 3), (0, 0)) results = logger.get() self.assertEqual(len(results), 2)", "self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg:", "results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 6, 'loser': 8,", "logger as lg: lg.add((0, 1), ('foo', 'bar')) with logger as lg: deleted =", "deleted = lg.delete(0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner':", "1) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0,", "'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_delete(self, filename): logger =", "lg: lg.add((0, 1), ('foo', 'bar')) with logger as lg: deleted = lg.delete('foo') results", "1450.0, }) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg:", "@use_temp_db('test.db') def test_add_delete(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1),", "self.assertEqual(results[1], { 'id': 3, 'winner': 7, 'loser': 8, 'trigger_id': 2, }) self.assertEqual(len(deleted), 2)", "(2, 3), 0, (1400, 1550), (1600, 1450)) lg.add((2, 3), (5, 6), (7, 8),", "7), (6, 8), 2) with logger as lg: deleted = lg.delete(0) results =", "3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_delete(self, filename): logger", "from unittest import TestCase import tempfile from server import db def use_temp_db(filename): def", "3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_add_list2(self, filename): logger = db.MatchResultDBController(filename) with logger", "logger.get(ordered=True) self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ]) 
@use_temp_db('test.db')", "import tempfile from server import db def use_temp_db(filename): def _use_temp_db(fn): def wrapper(obj): with", "= db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar')) results = logger.get(ordered=True)", "'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, })", "logger as lg: lg.add((0, 1), (1, 3), (2, 4), 0) lg.add((2, 3), (5,", "lg.add(0, 'foo') lg.add(1, 'bar') results = logger.get() self.assertEqual(results, [ {'id': 0, 'label': 'foo'},", "= lg.delete(0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3, 'winner': 7,", "{ 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db')", "logger = db.RatedMatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger", "with logger as lg: lg.add(0, 1, 2, 0) lg.add(1, 2, 3, 0) results", "3, 0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2,", "filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 0) lg.add(1,", "2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) self.assertEqual(logger.current_id,", "db.RatedMatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger = db.RatedMatchResultDBController(filename)", "('foo', 'bar')) results = logger.get(ordered=True) self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1,", "1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.ItemLabelDBController(filename) with logger", "'winner_rate': 1700.0, 'loser_rate': 1490.0, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 2,", 
"@use_temp_db('test.db') def test_delete(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1),", "0)) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser':", "'id': 1, 'winner': 3, 'loser': 4, 'trigger_id': 0, }) class TestRatedMatchResultDBController(TestCase): def test_get_from_blank(self):", "3, 'trigger_id': 0, }) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.MatchResultDBController(filename)", "}) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0,", "logger as lg: lg.add((0, 1), (1, 2), (2, 3), 0, (1400, 1550), (1600,", "def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.MatchResultDBController(f.name) results = logger.get() self.assertEqual(results,", "def test_add_one(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2,", "3), (5, 7), (6, 8), 2) with logger as lg: deleted = lg.delete(0)", "{ 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_delete(self,", "(1, 2), (2, 3), 0) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id':", "= db.ItemLabelDBController(filename) with logger as lg: lg.add(0, 'foo') lg.add(1, 'bar') results = logger.get()", "2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 3, 'loser': 4, 'trigger_id': 0, }) class", "as lg: lg.add((0, 1), (1, 2), (2, 3), 0) results = logger.get(ordered=True) self.assertEqual(len(results),", "}) class TestItemLabelDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.ItemLabelDBController(f.name) results", "f: logger = db.RatedMatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) 
@use_temp_db('test.db') def test_add_one(self, filename):", "as lg: lg.add((0, 1), (1, 2), (2, 3), 0, (1400, 1550), (1600, 1450))", "2), (2, 3), 0) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1,", "2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_add_list2(self, filename): logger = db.MatchResultDBController(filename)", "]) @use_temp_db('test.db') def test_add_list(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0,", "as lg: lg.add((0, 1), (1, 3), (2, 4), 0) lg.add((2, 3), (5, 7),", "db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2, 3), (0, 0))", "'loser': 3, 'trigger_id': 0, }) self.assertEqual(logger.current_id, 1) @use_temp_db('test.db') def test_add_list(self, filename): logger =", "lg.add((0, 1), (1, 3), (2, 4), 0) lg.add((2, 3), (5, 7), (6, 8),", "db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar')) results = logger.get(ordered=True) self.assertEqual(results,", "4), 0) lg.add((2, 3), (5, 7), (6, 8), 2) with logger as lg:", "use_temp_db(filename): def _use_temp_db(fn): def wrapper(obj): with tempfile.TemporaryDirectory() as dirname: fn(obj, os.path.join(dirname, filename)) return", "lg.add((0, 1), ('foo', 'bar')) results = logger.get(ordered=True) self.assertEqual(results, [ {'id': 0, 'label': 'foo'},", "filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2,", "1490)) with logger as lg: deleted = lg.delete(0) results = logger.get() self.assertEqual(len(results), 2)", "2, 'winner_rate': 1700.0, 'loser_rate': 1490.0, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner':", "'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_delete(self, filename):", "test_add_list(self, filename): logger = db.MatchResultDBController(filename) with logger 
as lg: lg.add((0, 1), (1, 2),", "3), 0, (1400, 1550), (1600, 1450)) lg.add((2, 3), (5, 6), (7, 8), 2,", "{ 'id': 3, 'winner': 7, 'loser': 8, 'trigger_id': 2, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1],", "logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1, 2), (2, 3),", "1), (1, 2), (2, 3), 0, (1400, 1550), (1600, 1450)) results = logger.get(ordered=True)", "{ 'id': 1, 'winner': 3, 'loser': 4, 'trigger_id': 0, }) class TestRatedMatchResultDBController(TestCase): def", "results = logger.get(ordered=True) self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'},", "'foo'}, {'id': 1, 'label': 'bar'}, ]) @use_temp_db('test.db') def test_delete(self, filename): logger = db.ItemLabelDBController(filename)", "self.assertEqual(results, [ {'id': 1, 'label': 'bar'}, ]) self.assertEqual(deleted, [ {'id': 0, 'label': 'foo'}", "unittest import TestCase import tempfile from server import db def use_temp_db(filename): def _use_temp_db(fn):", "with logger as lg: lg.add((0, 1), (1, 3), (2, 4), 0) lg.add((2, 3),", "TestItemLabelDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.ItemLabelDBController(f.name) results = logger.get()", "filename): logger = db.RatedMatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 0, 1400.0,", "1450) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser':", "logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar')) results =", "1550.0, 'loser_rate': 1450.0, }) @use_temp_db('test.db') def test_add_delete(self, filename): logger = db.RatedMatchResultDBController(filename) with logger", "'winner_rate': 1550.0, 'loser_rate': 1450.0, }) class TestItemLabelDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f:", "= logger.get(ordered=True) self.assertEqual(results, [ 
{'id': 0, 'label': 'foo'}, {'id': 1, 'label': 'bar'}, ])", "self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0, }) self.assertEqual(logger.current_id, 1)", "(1, 2), (2, 3), 0, (1400, 1550), (1600, 1450)) lg.add((2, 3), (5, 6),", "db.MatchResultDBController(filename) with logger as lg: lg.add(0, 1, 2, 0) lg.add(1, 2, 3, 0)", "test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.ItemLabelDBController(f.name) results = logger.get() self.assertEqual(results, [])", "2, 'loser': 3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) class TestItemLabelDBController(TestCase): def", "'bar') results = logger.get() self.assertEqual(results, [ {'id': 0, 'label': 'foo'}, {'id': 1, 'label':", "# -*- coding: utf-8 -*- import os from unittest import TestCase import tempfile", "with tempfile.NamedTemporaryFile() as f: logger = db.MatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db')", "def test_add_list(self, filename): logger = db.MatchResultDBController(filename) with logger as lg: lg.add((0, 1), (1,", "lg.delete('foo') results = logger.get() self.assertEqual(results, [ {'id': 1, 'label': 'bar'}, ]) self.assertEqual(deleted, [", "def _use_temp_db(fn): def wrapper(obj): with tempfile.TemporaryDirectory() as dirname: fn(obj, os.path.join(dirname, filename)) return wrapper", "def test_add_one(self, filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add(0, 'foo') lg.add(1,", "logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id': 0,", "lg: lg.add((0, 1), (1, 2), (2, 3), 0, (1400, 1550), (1600, 1450)) lg.add((2,", "3, 'trigger_id': 0, 'winner_rate': 1550.0, 'loser_rate': 1450.0, }) class TestItemLabelDBController(TestCase): def test_get_from_blank(self): with", "(2, 4), 0) lg.add((2, 3), (5, 7), (6, 8), 2) with logger as", "lg.add((0, 1), ('foo', 'bar')) with logger 
as lg: deleted = lg.delete('foo') results =", "'winner': 2, 'loser': 3, 'trigger_id': 0, }) @use_temp_db('test.db') def test_add_list2(self, filename): logger =", "3), 0, (1400, 1550), (1600, 1450)) results = logger.get(ordered=True) self.assertEqual(len(results), 2) self.assertEqual(results[1], {", "1490.0, }) self.assertEqual(len(deleted), 2) self.assertEqual(deleted[1], { 'id': 1, 'winner': 2, 'loser': 3, 'trigger_id':", "lg: deleted = lg.delete(0) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id': 3,", "= db.RatedMatchResultDBController(f.name) results = logger.get() self.assertEqual(results, []) @use_temp_db('test.db') def test_add_one(self, filename): logger =", "2, 3, 0, 1550, 1450) results = logger.get() self.assertEqual(len(results), 2) self.assertEqual(results[1], { 'id':", "'loser_rate': 1450.0, }) class TestItemLabelDBController(TestCase): def test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger =", "with logger as lg: deleted = lg.delete('foo') results = logger.get() self.assertEqual(results, [ {'id':", "test_get_from_blank(self): with tempfile.NamedTemporaryFile() as f: logger = db.MatchResultDBController(f.name) results = logger.get() self.assertEqual(results, [])", "filename): logger = db.ItemLabelDBController(filename) with logger as lg: lg.add((0, 1), ('foo', 'bar')) results" ]
[ "n < 1: raise ValueError(\"Cannot operate on empty array !\") elif order >", "from scipy.fftpack import fft,ifft from scipy.signal import gaussian from ..helper import fix_time_points, nextpow2", "Toeplitz, the corresponding pxp matrix is defined by p items only). Generally the", "lpc_ref(signal, order): \"\"\"Compute the Linear Prediction Coefficients. Return the order + 1 LPC", "Hermitian toeplitz equation: _ _ -R[1] = R[0] R[1] ... R[p-1] a[1] :", "!= 0\") # Estimated coefficients a = np.empty(order+1, 'float32') # temporary array t", "= _acorr_last_axis(x, nfft, maxlag) if axis != -1: a = np.swapaxes(a, -1, axis)", "fix_time_points, nextpow2 def lpc_ref(signal, order): \"\"\"Compute the Linear Prediction Coefficients. Return the order", "a[0] = 1. e = r[0] for i in range(1, order+1): acc =", "= np.arctan2(np.imag(rts), np.real(rts)) frqs = angz * (new_sr / (2 * np.pi)) frq_inds", "'gaussian': window = gaussian(nperseg + 2, 0.45 * (nperseg - 1) / 2)[1:nperseg", "missing output[indices[i] / new_sr] = formants return output def signal_to_formants(signal, sr, num_formants=5, max_freq=5000,", "must have a lenght >= lpc order\") if order > 0: p =", "array to invert (since the matrix is symmetric Toeplitz, the corresponding pxp matrix", "efficiently solve symmetric linear systems with toeplitz structure. Parameters --------- r : array-like", "in the matrix, the inversion can be done in O(p^2) instead of O(p^3).", "axis != -1: a = np.swapaxes(a, -1, axis) return a #@jit def lpc(signal,", "_acorr_last_axis(x, nfft, maxlag) if axis != -1: a = np.swapaxes(a, -1, axis) return", "R[0] a[p] _ with respect to a ( is the complex conjugate). Using", "-alpha], 1, signal) if sr > new_sr: proc = librosa.resample(proc, sr, new_sr) nperseg", "the matrix is symmetric Toeplitz, the corresponding pxp matrix is defined by p", "# Reflection coefficients k = np.empty(order, 'float32') a[0] = 1. 
e = r[0]", "np.log(np.abs(rts[frq_inds])) return frqs, bw def lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape =", "for i in range(1, order+1): acc = r[i] for j in range(1, i):", "(the output will have order + 1 items) Notes ---- This is just", "None)] * missing output[indices[i] / new_sr] = formants return output def signal_to_formants(signal, sr,", "the Linear Prediction Coefficients. Return the order + 1 LPC coefficients for the", "= k[i-1] for j in range(order): t[j] = a[j] for j in range(1,", "np.arctan2(np.imag(rts), np.real(rts)) frqs = angz * (new_sr / (2 * np.pi)) frq_inds =", "autocorrelation of the signal for linear prediction coefficients estimation. The first item must", "* (new_sr / (2 * np.pi)) * np.log(np.abs(rts[frq_inds])) return frqs, bw def lpc_formants(signal,", "max_freq alpha = np.exp(-2 * np.pi * 50 * (1 / new_sr)) proc", "will have order + 1 items) Notes ---- This is just for reference,", "toeplitz structure. Parameters --------- r : array-like input array to invert (since the", "np.arange(int(nperseg / 2), proc.shape[0] - int(nperseg / 2) + 1, nperstep) num_frames =", "use acorr directly is for speed issue.\"\"\" if not np.isrealobj(x): raise ValueError(\"Complex input", "purpose only. 
Levinson is a well-known algorithm to solve the Hermitian toeplitz equation:", "!= -1: x = np.swapaxes(x, -1, axis) a = _acorr_last_axis(x, nfft, maxlag) if", "sr, num_formants, max_freq, time_step, win_len, window_shape = 'gaussian'): output = {} new_sr =", "order) def process_frame(X, window, num_formants, new_sr): X = X * window A, e,", "This is just for reference, as it is using the direct inversion of", "0: X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2) +", "gaussian(nperseg + 2, 0.45 * (nperseg - 1) / 2)[1:nperseg + 1] else:", "r.size if n < 1: raise ValueError(\"Cannot operate on empty array !\") elif", "non zero values in autocorrelation one needs for p LPC # coefficients nx", "really slow\"\"\" if signal.ndim > 1: raise ValueError(\"Array of rank > 1 not", "= x.shape[axis] nfft = int(2 ** nextpow2(2 * maxlag - 1)) if axis", "Prediction Coefficients. Return the order + 1 LPC coefficients for the signal. c", "= np.swapaxes(a, -1, axis) return a #@jit def lpc(signal, order, axis=-1): \"\"\"Compute the", "= signal.shape[0] / sr return fix_time_points(output, begin, padding, duration) def file_to_formants(file_path, num_formants, max_freq,", "a = np.real(ifft(np.abs(fft(x, n = nfft) ** 2))) return a[..., :maxlag+1] / x.shape[-1]", "Number of non zero values in autocorrelation one needs for p LPC #", "item must be a non zero real. Notes ---- This implementation is in", "f < 50: continue if f > max_freq - 50: continue formants.append((np.asscalar(f), np.asscalar(bw[j])))", "the autocorrelation matrix inversion, and fft for the autocorrelation computation. 
For small order,", "max_freq, win_len, time_step): sig, sr = librosa.load(file_path, sr=None, mono=False) output = signal_to_formants(sig, sr,", "be real.\") elif not np.isfinite(1/r[0]): raise ValueError(\"First item should be != 0\") #", "(new_sr / (2 * np.pi)) * np.log(np.abs(rts[frq_inds])) return frqs, bw def lpc_formants(signal, sr,", "# Estimated coefficients a = np.empty(order+1, 'float32') # temporary array t = np.empty(order+1,", "minimized. Parameters ---------- signal: array_like input signal order : int LPC order (the", "def lpc_ref(signal, order): \"\"\"Compute the Linear Prediction Coefficients. Return the order + 1", "using the direct inversion of the toeplitz matrix, which is really slow\"\"\" if", "int(nperseg / 2)] frqs, bw = process_frame(X, window, num_formants, new_sr) formants = []", "a[j] for j in range(1, i): a[j] += k[i-1] * np.conj(t[i-j]) e *=", "a well-known algorithm to solve the Hermitian toeplitz equation: _ _ -R[1] =", "by the size of input signal) Notes ----- The reason why we do", "<= size-1\") if not np.isreal(r[0]): raise ValueError(\"First item of input must be real.\")", "= a[j] for j in range(1, i): a[j] += k[i-1] * np.conj(t[i-j]) e", "do not use acorr directly is for speed issue.\"\"\" if not np.isrealobj(x): raise", "of O(p^3). \"\"\" r = np.atleast_1d(r) if r.ndim > 1: raise ValueError(\"Only rank", "error. k : array-like reflection coefficients. Notes ----- This uses Levinson-Durbin recursion for", "return np.ones(1, dtype = 'float32') #@jit def levinson_1d(r, order): \"\"\"Levinson-Durbin recursion, to efficiently", "= np.arange(int(nperseg / 2), proc.shape[0] - int(nperseg / 2) + 1, nperstep) num_frames", "if axis != -1: x = np.swapaxes(x, -1, axis) a = _acorr_last_axis(x, nfft,", "pxp matrix is defined by p items only). Generally the autocorrelation of the", "unsuitable for any serious computation. 
Use it as educational and reference purpose only.", "of the autocorrelation is faster: use levinson and correlate in this case.\"\"\" n", "2 * (new_sr / (2 * np.pi)) * np.log(np.abs(rts[frq_inds])) return frqs, bw def", "alpha = np.exp(-2 * np.pi * 50 * (1 / new_sr)) proc =", "signal_to_formants(signal, sr, num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025, begin=None, padding=None): output = lpc_formants(signal, sr, num_formants,", "Notes ---- This is just for reference, as it is using the direct", "a = np.empty(order+1, 'float32') # temporary array t = np.empty(order+1, 'float32') # Reflection", "k[i-1] * np.conj(t[i-j]) e *= 1 - k[i-1] * np.conj(k[i-1]) return a, e,", "import numpy as np import scipy as sp from scipy.signal import lfilter from", "duration) def file_to_formants(file_path, num_formants, max_freq, win_len, time_step): sig, sr = librosa.load(file_path, sr=None, mono=False)", "new_sr] = formants return output def signal_to_formants(signal, sr, num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025, begin=None,", "= np.swapaxes(x, -1, axis) a = _acorr_last_axis(x, nfft, maxlag) if axis != -1:", "size of input signal) Notes ----- The reason why we do not use", "solution of the inversion. e : array-like the prediction error. k : array-like", "respect to a ( is the complex conjugate). Using the special symmetry in", "the k+1 coefficients of a k order linear filter: xp[n] = -c[1] *", "to efficiently solve symmetric linear systems with toeplitz structure. Parameters --------- r :", "R[1] ... R[p-1] a[1] : : : : * : : : :", "* maxlag - 1)) if axis != -1: x = np.swapaxes(x, -1, axis)", "#@jit def lpc(signal, order, axis=-1): \"\"\"Compute the Linear Prediction Coefficients. Return the order", "computation. 
For small order, particularly if order << signal size, direct computation of", "The reason why we do not use acorr directly is for speed issue.\"\"\"", "2 != 0: X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg /", "j in range(order): t[j] = a[j] for j in range(1, i): a[j] +=", "formants return output def signal_to_formants(signal, sr, num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025, begin=None, padding=None): output", "for reference, as it is using the direct inversion of the toeplitz matrix,", "autocorrelation estimator (divided by the size of input signal) Notes ----- The reason", "* : -R[p] = R[p-1] R[p-2] ... R[0] a[p] _ with respect to", "1 items) Notes ---- This is just for reference, as it is using", "np.conj(k[i-1]) return a, e, k #@jit def _acorr_last_axis(x, nfft, maxlag): a = np.real(ifft(np.abs(fft(x,", "correlate in this case.\"\"\" n = signal.shape[axis] if order > n: raise ValueError(\"Input", "= process_frame(X, window, num_formants, new_sr) formants = [] for j, f in enumerate(frqs):", "signal. c = lpc(x, k) will find the k+1 coefficients of a k", "lpc(x, k) will find the k+1 coefficients of a k order linear filter:", "new_sr) nperseg = int(win_len * new_sr) nperstep = int(time_step * new_sr) if window_shape", "np import scipy as sp from scipy.signal import lfilter from scipy.fftpack import fft,ifft", "x[i] is minimized. 
Parameters ---------- signal: array_like input signal order : int LPC", "nperseg = int(win_len * new_sr) nperstep = int(time_step * new_sr) if window_shape ==", "+ 1] else: window = np.hanning(nperseg + 2)[1:nperseg + 1] indices = np.arange(int(nperseg", "*= 1 - k[i-1] * np.conj(k[i-1]) return a, e, k #@jit def _acorr_last_axis(x,", "np.pi * 50 * (1 / new_sr)) proc = lfilter([1., -alpha], 1, signal)", "sp from scipy.signal import lfilter from scipy.fftpack import fft,ifft from scipy.signal import gaussian", "gaussian from ..helper import fix_time_points, nextpow2 def lpc_ref(signal, order): \"\"\"Compute the Linear Prediction", "maxlag) if axis != -1: a = np.swapaxes(a, -1, axis) return a #@jit", "+ 2)[1:nperseg + 1] indices = np.arange(int(nperseg / 2), proc.shape[0] - int(nperseg /", "sr = librosa.load(file_path, sr=None, mono=False) output = signal_to_formants(sig, sr, num_formants, max_freq, win_len, time_step)", "= lpc(x, k) will find the k+1 coefficients of a k order linear", "a, e, k #@jit def _acorr_last_axis(x, nfft, maxlag): a = np.real(ifft(np.abs(fft(x, n =", "'full') r[:nx] = x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.], phi)) else: return", "X = X * window A, e, k = lpc(X, num_formants*2) rts =", "- 1) / 2)[1:nperseg + 1] else: window = np.hanning(nperseg + 2)[1:nperseg +", "return fix_time_points(output, begin, padding, duration) def file_to_formants(file_path, num_formants, max_freq, win_len, time_step): sig, sr", ">= lpc order\") if order > 0: p = order + 1 r", ": : : * : : : : _ * : -R[p] =", "k+1 coefficients of a k order linear filter: xp[n] = -c[1] * x[n-2]", "will find the k+1 coefficients of a k order linear filter: xp[n] =", "a[..., :maxlag+1] / x.shape[-1] #@jit def acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation of x along", "coefficients a = np.empty(order+1, 'float32') # temporary array t = np.empty(order+1, 'float32') #", "the autocorrelation is 
faster: use levinson and correlate in this case.\"\"\" n =", "X * window A, e, k = lpc(X, num_formants*2) rts = np.roots(A) rts", "= r[0] for i in range(1, order+1): acc = r[i] for j in", "bw = -1 / 2 * (new_sr / (2 * np.pi)) * np.log(np.abs(rts[frq_inds]))", "- x[i] is minimized. Parameters ---------- signal: array_like input signal order : int", "* 50 * (1 / new_sr)) proc = lfilter([1., -alpha], 1, signal) if", "for i in range(num_frames): if nperseg % 2 != 0: X = proc[indices[i]", "* np.conj(k[i-1]) return a, e, k #@jit def _acorr_last_axis(x, nfft, maxlag): a =", "sr, num_formants, max_freq, time_step, win_len, window_shape='gaussian') duration = signal.shape[0] / sr return fix_time_points(output,", "/ sr return fix_time_points(output, begin, padding, duration) def file_to_formants(file_path, num_formants, max_freq, win_len, time_step):", "a[p] _ with respect to a ( is the complex conjugate). Using the", "proc = librosa.resample(proc, sr, new_sr) nperseg = int(win_len * new_sr) nperstep = int(time_step", "1, signal) if sr > new_sr: proc = librosa.resample(proc, sr, new_sr) nperseg =", "raise ValueError(\"Only rank 1 are supported for now.\") n = r.size if n", "the solution of the inversion. e : array-like the prediction error. 
k :", "output = {} new_sr = 2 * max_freq alpha = np.exp(-2 * np.pi", "order (the output will have order + 1 items) Returns ------- a :", "window_shape == 'gaussian': window = gaussian(nperseg + 2, 0.45 * (nperseg - 1)", "order\") r = acorr_lpc(signal, axis) return levinson_1d(r, order) def process_frame(X, window, num_formants, new_sr):", "time_step, win_len, window_shape='gaussian') duration = signal.shape[0] / sr return fix_time_points(output, begin, padding, duration)", "def lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape = 'gaussian'): output = {}", "import fft,ifft from scipy.signal import gaussian from ..helper import fix_time_points, nextpow2 def lpc_ref(signal,", "else: return np.ones(1, dtype = 'float32') #@jit def levinson_1d(r, order): \"\"\"Levinson-Durbin recursion, to", "nextpow2(2 * maxlag - 1)) if axis != -1: x = np.swapaxes(x, -1,", "order, axis=-1): \"\"\"Compute the Linear Prediction Coefficients. Return the order + 1 LPC", "np.conj(t[i-j]) e *= 1 - k[i-1] * np.conj(k[i-1]) return a, e, k #@jit", "#@jit def levinson_1d(r, order): \"\"\"Levinson-Durbin recursion, to efficiently solve symmetric linear systems with", "1: raise ValueError(\"Array of rank > 1 not supported yet\") if order >", "2):indices[i] + int(nperseg / 2)] frqs, bw = process_frame(X, window, num_formants, new_sr) formants", "f in enumerate(frqs): if f < 50: continue if f > max_freq -", "the special symmetry in the matrix, the inversion can be done in O(p^2)", "window_shape='gaussian') duration = signal.shape[0] / sr return fix_time_points(output, begin, padding, duration) def file_to_formants(file_path,", "sr return fix_time_points(output, begin, padding, duration) def file_to_formants(file_path, num_formants, max_freq, win_len, time_step): sig,", "input array to invert (since the matrix is symmetric Toeplitz, the corresponding pxp", "maxlag - 1)) if axis != -1: x = np.swapaxes(x, -1, axis) a", "frqs, bw def lpc_formants(signal, sr, num_formants, 
max_freq, time_step, win_len, window_shape = 'gaussian'): output", "Reflection coefficients k = np.empty(order, 'float32') a[0] = 1. e = r[0] for", "supported for now.\") n = r.size if n < 1: raise ValueError(\"Cannot operate", "np.real(ifft(np.abs(fft(x, n = nfft) ** 2))) return a[..., :maxlag+1] / x.shape[-1] #@jit def", "Notes ----- The reason why we do not use acorr directly is for", "k order linear filter: xp[n] = -c[1] * x[n-2] - ... - c[k-1]", "ValueError(\"Complex input not supported yet\") maxlag = x.shape[axis] nfft = int(2 ** nextpow2(2", "The first item must be a non zero real. Notes ---- This implementation", "signal must have length >= order\") r = acorr_lpc(signal, axis) return levinson_1d(r, order)", "array !\") elif order > n - 1: raise ValueError(\"Order should be <=", "maxlag = x.shape[axis] nfft = int(2 ** nextpow2(2 * maxlag - 1)) if", "rts[np.where(np.imag(rts) >= 0)] angz = np.arctan2(np.imag(rts), np.real(rts)) frqs = angz * (new_sr /", "= R[p-1] R[p-2] ... R[0] a[p] _ with respect to a ( is", "R[p-1] R[p-2] ... R[0] a[p] _ with respect to a ( is the", "np.atleast_1d(r) if r.ndim > 1: raise ValueError(\"Only rank 1 are supported for now.\")", "symmetric Toeplitz, the corresponding pxp matrix is defined by p items only). Generally", "elif not np.isfinite(1/r[0]): raise ValueError(\"First item should be != 0\") # Estimated coefficients", "return a[..., :maxlag+1] / x.shape[-1] #@jit def acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation of x", "range(order): t[j] = a[j] for j in range(1, i): a[j] += k[i-1] *", "zero real. Notes ---- This implementation is in python, hence unsuitable for any", "nperstep = int(time_step * new_sr) if window_shape == 'gaussian': window = gaussian(nperseg +", ": : : : _ * : -R[p] = R[p-1] R[p-2] ... R[0]", "along the given axis. 
This compute the biased autocorrelation estimator (divided by the", "return np.concatenate(([1.], phi)) else: return np.ones(1, dtype = 'float32') #@jit def levinson_1d(r, order):", "signal.shape[axis] if order > n: raise ValueError(\"Input signal must have length >= order\")", "k = np.empty(order, 'float32') a[0] = 1. e = r[0] for i in", "= np.zeros(p, 'float32') # Number of non zero values in autocorrelation one needs", "the biased autocorrelation estimator (divided by the size of input signal) Notes -----", "<filename>acousticsim/analysis/formants/lpc.py import librosa import numpy as np import scipy as sp from scipy.signal", "axis) return levinson_1d(r, order) def process_frame(X, window, num_formants, new_sr): X = X *", "return frqs, bw def lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape = 'gaussian'):", "of the squared-error e[i] = xp[i] - x[i] is minimized. Parameters ---------- signal:", "def levinson_1d(r, order): \"\"\"Levinson-Durbin recursion, to efficiently solve symmetric linear systems with toeplitz", "symmetry in the matrix, the inversion can be done in O(p^2) instead of", "for j in range(1, i): a[j] += k[i-1] * np.conj(t[i-j]) e *= 1", "maxlag): a = np.real(ifft(np.abs(fft(x, n = nfft) ** 2))) return a[..., :maxlag+1] /", "1 items) Returns ------- a : array-like the solution of the inversion. 
e", "np.real(rts)) frqs = angz * (new_sr / (2 * np.pi)) frq_inds = np.argsort(frqs)", "1 are supported for now.\") n = r.size if n < 1: raise", "is faster: use levinson and correlate in this case.\"\"\" n = signal.shape[axis] if", "{} new_sr = 2 * max_freq alpha = np.exp(-2 * np.pi * 50", "raise ValueError(\"Order should be <= size-1\") if not np.isreal(r[0]): raise ValueError(\"First item of", "t = np.empty(order+1, 'float32') # Reflection coefficients k = np.empty(order, 'float32') a[0] =", "n: raise ValueError(\"Input signal must have length >= order\") r = acorr_lpc(signal, axis)", "np.isreal(r[0]): raise ValueError(\"First item of input must be real.\") elif not np.isfinite(1/r[0]): raise", "frq_inds = np.argsort(frqs) frqs = frqs[frq_inds] bw = -1 / 2 * (new_sr", "x.shape[-1] #@jit def acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation of x along the given axis.", "num_frames = len(indices) for i in range(num_frames): if nperseg % 2 != 0:", "is for speed issue.\"\"\" if not np.isrealobj(x): raise ValueError(\"Complex input not supported yet\")", "> max_freq - 50: continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing = num_formants - len(formants) if", "c = lpc(x, k) will find the k+1 coefficients of a k order", "for any serious computation. Use it as educational and reference purpose only. Levinson", "= np.empty(order+1, 'float32') # temporary array t = np.empty(order+1, 'float32') # Reflection coefficients", "given axis. This compute the biased autocorrelation estimator (divided by the size of", "R[p-2] ... R[0] a[p] _ with respect to a ( is the complex", "p LPC # coefficients nx = np.min([p, signal.size]) x = np.correlate(signal, signal, 'full')", "signal size, direct computation of the autocorrelation is faster: use levinson and correlate", "new_sr: proc = librosa.resample(proc, sr, new_sr) nperseg = int(win_len * new_sr) nperstep =", "as it is using the direct inversion of the toeplitz matrix, which is", "a ( is the complex conjugate). 
Using the special symmetry in the matrix,", "= r[i] for j in range(1, i): acc += a[j] * r[i-j] k[i-1]", "issue.\"\"\" if not np.isrealobj(x): raise ValueError(\"Complex input not supported yet\") maxlag = x.shape[axis]", "/ new_sr] = formants return output def signal_to_formants(signal, sr, num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025,", "it is using the direct inversion of the toeplitz matrix, which is really", "linear filter: xp[n] = -c[1] * x[n-2] - ... - c[k-1] * x[n-k-1]", "ValueError(\"Input signal must have a lenght >= lpc order\") if order > 0:", "return output def signal_to_formants(signal, sr, num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025, begin=None, padding=None): output =", "of a k order linear filter: xp[n] = -c[1] * x[n-2] - ...", "not supported yet\") if order > signal.size: raise ValueError(\"Input signal must have a", "+ int(nperseg / 2)] frqs, bw = process_frame(X, window, num_formants, new_sr) formants =", "sig, sr = librosa.load(file_path, sr=None, mono=False) output = signal_to_formants(sig, sr, num_formants, max_freq, win_len,", "if signal.ndim > 1: raise ValueError(\"Array of rank > 1 not supported yet\")", "small order, particularly if order << signal size, direct computation of the autocorrelation", "/ 2 * (new_sr / (2 * np.pi)) * np.log(np.abs(rts[frq_inds])) return frqs, bw", "axis != -1: x = np.swapaxes(x, -1, axis) a = _acorr_last_axis(x, nfft, maxlag)", "e *= 1 - k[i-1] * np.conj(k[i-1]) return a, e, k #@jit def", "items) Returns ------- a : array-like the solution of the inversion. e :", "R[0] R[1] ... 
R[p-1] a[1] : : : : * : : :", "signal) if sr > new_sr: proc = librosa.resample(proc, sr, new_sr) nperseg = int(win_len", "zero values in autocorrelation one needs for p LPC # coefficients nx =", "file_to_formants(file_path, num_formants, max_freq, win_len, time_step): sig, sr = librosa.load(file_path, sr=None, mono=False) output =", "input signal) Notes ----- The reason why we do not use acorr directly", "by p items only). Generally the autocorrelation of the signal for linear prediction", "elif order > n - 1: raise ValueError(\"Order should be <= size-1\") if", "1 LPC coefficients for the signal. c = lpc(x, k) will find the", "autocorrelation one needs for p LPC # coefficients nx = np.min([p, signal.size]) x", "k : array-like reflection coefficients. Notes ----- This uses Levinson-Durbin recursion for the", "order + 1 items) Returns ------- a : array-like the solution of the", "* max_freq alpha = np.exp(-2 * np.pi * 50 * (1 / new_sr))", ": * : : : : _ * : -R[p] = R[p-1] R[p-2]", "invert (since the matrix is symmetric Toeplitz, the corresponding pxp matrix is defined", "= 2 * max_freq alpha = np.exp(-2 * np.pi * 50 * (1", "1: raise ValueError(\"Cannot operate on empty array !\") elif order > n -", "reference, as it is using the direct inversion of the toeplitz matrix, which", "1 - k[i-1] * np.conj(k[i-1]) return a, e, k #@jit def _acorr_last_axis(x, nfft,", "is really slow\"\"\" if signal.ndim > 1: raise ValueError(\"Array of rank > 1", "X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2) + 1]", "'float32') # temporary array t = np.empty(order+1, 'float32') # Reflection coefficients k =", "raise ValueError(\"Complex input not supported yet\") maxlag = x.shape[axis] nfft = int(2 **", "+= [(None, None)] * missing output[indices[i] / new_sr] = formants return output def", "coefficients. 
Notes ----- This uses Levinson-Durbin recursion for the autocorrelation matrix inversion, and", ": : * : : : : _ * : -R[p] = R[p-1]", "formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing = num_formants - len(formants) if missing: formants += [(None, None)]", "proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2) + 1] else: X", "can be done in O(p^2) instead of O(p^3). \"\"\" r = np.atleast_1d(r) if", "frqs = frqs[frq_inds] bw = -1 / 2 * (new_sr / (2 *", "-R[1] = R[0] R[1] ... R[p-1] a[1] : : : : * :", "= R[0] R[1] ... R[p-1] a[1] : : : : * : :", "if r.ndim > 1: raise ValueError(\"Only rank 1 are supported for now.\") n", "speed issue.\"\"\" if not np.isrealobj(x): raise ValueError(\"Complex input not supported yet\") maxlag =", "only). Generally the autocorrelation of the signal for linear prediction coefficients estimation. The", "have length >= order\") r = acorr_lpc(signal, axis) return levinson_1d(r, order) def process_frame(X,", "np.exp(-2 * np.pi * 50 * (1 / new_sr)) proc = lfilter([1., -alpha],", "size, direct computation of the autocorrelation is faster: use levinson and correlate in", "complex conjugate). Using the special symmetry in the matrix, the inversion can be", "= 'float32') #@jit def levinson_1d(r, order): \"\"\"Levinson-Durbin recursion, to efficiently solve symmetric linear", "_ with respect to a ( is the complex conjugate). Using the special", "needs for p LPC # coefficients nx = np.min([p, signal.size]) x = np.correlate(signal,", "is in python, hence unsuitable for any serious computation. Use it as educational", "to a ( is the complex conjugate). 
Using the special symmetry in the", "supported yet\") if order > signal.size: raise ValueError(\"Input signal must have a lenght", "in range(order): t[j] = a[j] for j in range(1, i): a[j] += k[i-1]", "- k[i-1] * np.conj(k[i-1]) return a, e, k #@jit def _acorr_last_axis(x, nfft, maxlag):", "= r.size if n < 1: raise ValueError(\"Cannot operate on empty array !\")", "the complex conjugate). Using the special symmetry in the matrix, the inversion can", "n = r.size if n < 1: raise ValueError(\"Cannot operate on empty array", "def signal_to_formants(signal, sr, num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025, begin=None, padding=None): output = lpc_formants(signal, sr,", "particularly if order << signal size, direct computation of the autocorrelation is faster:", "nfft, maxlag) if axis != -1: a = np.swapaxes(a, -1, axis) return a", "r.ndim > 1: raise ValueError(\"Only rank 1 are supported for now.\") n =", "phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.], phi)) else: return np.ones(1, dtype = 'float32')", "+ 1 r = np.zeros(p, 'float32') # Number of non zero values in", "/ 2)] frqs, bw = process_frame(X, window, num_formants, new_sr) formants = [] for", "and correlate in this case.\"\"\" n = signal.shape[axis] if order > n: raise", "/ x.shape[-1] #@jit def acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation of x along the given", "max_freq, time_step, win_len, window_shape='gaussian') duration = signal.shape[0] / sr return fix_time_points(output, begin, padding,", "in range(1, i): a[j] += k[i-1] * np.conj(t[i-j]) e *= 1 - k[i-1]", "j in range(1, i): a[j] += k[i-1] * np.conj(t[i-j]) e *= 1 -", "-1: a = np.swapaxes(a, -1, axis) return a #@jit def lpc(signal, order, axis=-1):", "/ 2) + 1] else: X = proc[indices[i] - int(nperseg / 2):indices[i] +", "!= -1: a = np.swapaxes(a, -1, axis) return a #@jit def lpc(signal, order,", "matrix inversion, and fft for the autocorrelation computation. 
For small order, particularly if", "- 50: continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing = num_formants - len(formants) if missing: formants", "* np.pi * 50 * (1 / new_sr)) proc = lfilter([1., -alpha], 1,", "padding=None): output = lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape='gaussian') duration = signal.shape[0]", "1: raise ValueError(\"Only rank 1 are supported for now.\") n = r.size if", "int LPC order (the output will have order + 1 items) Returns -------", "... R[p-1] a[1] : : : : * : : : : _", "first item must be a non zero real. Notes ---- This implementation is", "for j in range(order): t[j] = a[j] for j in range(1, i): a[j]", "a lenght >= lpc order\") if order > 0: p = order +", "return a #@jit def lpc(signal, order, axis=-1): \"\"\"Compute the Linear Prediction Coefficients. Return", "librosa.resample(proc, sr, new_sr) nperseg = int(win_len * new_sr) nperstep = int(time_step * new_sr)", "structure. Parameters --------- r : array-like input array to invert (since the matrix", "ValueError(\"First item of input must be real.\") elif not np.isfinite(1/r[0]): raise ValueError(\"First item", "linear systems with toeplitz structure. Parameters --------- r : array-like input array to", "1] else: window = np.hanning(nperseg + 2)[1:nperseg + 1] indices = np.arange(int(nperseg /", "have a lenght >= lpc order\") if order > 0: p = order", "raise ValueError(\"Input signal must have length >= order\") r = acorr_lpc(signal, axis) return", "coefficients of a k order linear filter: xp[n] = -c[1] * x[n-2] -", "- int(nperseg / 2):indices[i] + int(nperseg / 2) + 1] else: X =", "num_formants - len(formants) if missing: formants += [(None, None)] * missing output[indices[i] /", "slow\"\"\" if signal.ndim > 1: raise ValueError(\"Array of rank > 1 not supported", "python, hence unsuitable for any serious computation. 
Use it as educational and reference", "signal.shape[0] / sr return fix_time_points(output, begin, padding, duration) def file_to_formants(file_path, num_formants, max_freq, win_len,", "(since the matrix is symmetric Toeplitz, the corresponding pxp matrix is defined by", "autocorrelation computation. For small order, particularly if order << signal size, direct computation", "(2 * np.pi)) * np.log(np.abs(rts[frq_inds])) return frqs, bw def lpc_formants(signal, sr, num_formants, max_freq,", "angz * (new_sr / (2 * np.pi)) frq_inds = np.argsort(frqs) frqs = frqs[frq_inds]", "num_formants, max_freq, time_step, win_len, window_shape='gaussian') duration = signal.shape[0] / sr return fix_time_points(output, begin,", "is using the direct inversion of the toeplitz matrix, which is really slow\"\"\"", "signal) Notes ----- The reason why we do not use acorr directly is", ": array-like reflection coefficients. Notes ----- This uses Levinson-Durbin recursion for the autocorrelation", "be != 0\") # Estimated coefficients a = np.empty(order+1, 'float32') # temporary array", "* r[i-j] k[i-1] = -acc / e a[i] = k[i-1] for j in", "conjugate). Using the special symmetry in the matrix, the inversion can be done", "1] else: X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2)]", "Coefficients. Return the order + 1 LPC coefficients for the signal. c =", "be <= size-1\") if not np.isreal(r[0]): raise ValueError(\"First item of input must be", "!= 0: X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2)", "with toeplitz structure. Parameters --------- r : array-like input array to invert (since", "num_formants*2) rts = np.roots(A) rts = rts[np.where(np.imag(rts) >= 0)] angz = np.arctan2(np.imag(rts), np.real(rts))", "raise ValueError(\"Input signal must have a lenght >= lpc order\") if order >", "len(indices) for i in range(num_frames): if nperseg % 2 != 0: X =", "axis=-1): \"\"\"Compute the Linear Prediction Coefficients. 
Return the order + 1 LPC coefficients", "autocorrelation is faster: use levinson and correlate in this case.\"\"\" n = signal.shape[axis]", "* np.pi)) frq_inds = np.argsort(frqs) frqs = frqs[frq_inds] bw = -1 / 2", ": _ * : -R[p] = R[p-1] R[p-2] ... R[0] a[p] _ with", "equation: _ _ -R[1] = R[0] R[1] ... R[p-1] a[1] : : :", "of input signal) Notes ----- The reason why we do not use acorr", "order): \"\"\"Levinson-Durbin recursion, to efficiently solve symmetric linear systems with toeplitz structure. Parameters", "----- This uses Levinson-Durbin recursion for the autocorrelation matrix inversion, and fft for", "if n < 1: raise ValueError(\"Cannot operate on empty array !\") elif order", "else: window = np.hanning(nperseg + 2)[1:nperseg + 1] indices = np.arange(int(nperseg / 2),", "raise ValueError(\"First item should be != 0\") # Estimated coefficients a = np.empty(order+1,", "nperstep) num_frames = len(indices) for i in range(num_frames): if nperseg % 2 !=", "are supported for now.\") n = r.size if n < 1: raise ValueError(\"Cannot", "the corresponding pxp matrix is defined by p items only). Generally the autocorrelation", "ValueError(\"Order should be <= size-1\") if not np.isreal(r[0]): raise ValueError(\"First item of input", "np.empty(order+1, 'float32') # Reflection coefficients k = np.empty(order, 'float32') a[0] = 1. e", "* np.conj(t[i-j]) e *= 1 - k[i-1] * np.conj(k[i-1]) return a, e, k", "symmetric linear systems with toeplitz structure. Parameters --------- r : array-like input array", "O(p^3). \"\"\" r = np.atleast_1d(r) if r.ndim > 1: raise ValueError(\"Only rank 1", "acc += a[j] * r[i-j] k[i-1] = -acc / e a[i] = k[i-1]", "_ -R[1] = R[0] R[1] ... R[p-1] a[1] : : : : *", "the autocorrelation computation. 
For small order, particularly if order << signal size, direct", "lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape = 'gaussian'): output = {} new_sr", "window, num_formants, new_sr) formants = [] for j, f in enumerate(frqs): if f", "a : array-like the solution of the inversion. e : array-like the prediction", "one needs for p LPC # coefficients nx = np.min([p, signal.size]) x =", "'float32') # Number of non zero values in autocorrelation one needs for p", "and reference purpose only. Levinson is a well-known algorithm to solve the Hermitian", "------- a : array-like the solution of the inversion. e : array-like the", "e : array-like the prediction error. k : array-like reflection coefficients. Notes -----", "2):indices[i] + int(nperseg / 2) + 1] else: X = proc[indices[i] - int(nperseg", "** nextpow2(2 * maxlag - 1)) if axis != -1: x = np.swapaxes(x,", "- ... - c[k-1] * x[n-k-1] Such as the sum of the squared-error", "2))) return a[..., :maxlag+1] / x.shape[-1] #@jit def acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation of", "acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation of x along the given axis. This compute the", "implementation is in python, hence unsuitable for any serious computation. Use it as", "** 2))) return a[..., :maxlag+1] / x.shape[-1] #@jit def acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation", "if not np.isreal(r[0]): raise ValueError(\"First item of input must be real.\") elif not", "instead of O(p^3). \"\"\" r = np.atleast_1d(r) if r.ndim > 1: raise ValueError(\"Only", "coefficients nx = np.min([p, signal.size]) x = np.correlate(signal, signal, 'full') r[:nx] = x[signal.size-1:signal.size+order]", "np.ones(1, dtype = 'float32') #@jit def levinson_1d(r, order): \"\"\"Levinson-Durbin recursion, to efficiently solve", "Returns ------- a : array-like the solution of the inversion. e : array-like", "autocorrelation matrix inversion, and fft for the autocorrelation computation. 
For small order, particularly", "sr > new_sr: proc = librosa.resample(proc, sr, new_sr) nperseg = int(win_len * new_sr)", "proc = lfilter([1., -alpha], 1, signal) if sr > new_sr: proc = librosa.resample(proc,", "(nperseg - 1) / 2)[1:nperseg + 1] else: window = np.hanning(nperseg + 2)[1:nperseg", "to solve the Hermitian toeplitz equation: _ _ -R[1] = R[0] R[1] ...", "defined by p items only). Generally the autocorrelation of the signal for linear", "empty array !\") elif order > n - 1: raise ValueError(\"Order should be", "duration = signal.shape[0] / sr return fix_time_points(output, begin, padding, duration) def file_to_formants(file_path, num_formants,", "order > n: raise ValueError(\"Input signal must have length >= order\") r =", "the squared-error e[i] = xp[i] - x[i] is minimized. Parameters ---------- signal: array_like", "= np.hanning(nperseg + 2)[1:nperseg + 1] indices = np.arange(int(nperseg / 2), proc.shape[0] -", "e a[i] = k[i-1] for j in range(order): t[j] = a[j] for j", "output will have order + 1 items) Notes ---- This is just for", "of rank > 1 not supported yet\") if order > signal.size: raise ValueError(\"Input", "e = r[0] for i in range(1, order+1): acc = r[i] for j", "just for reference, as it is using the direct inversion of the toeplitz", "= np.atleast_1d(r) if r.ndim > 1: raise ValueError(\"Only rank 1 are supported for", "array_like input signal order : int LPC order (the output will have order", "proc.shape[0] - int(nperseg / 2) + 1, nperstep) num_frames = len(indices) for i", "np.zeros(p, 'float32') # Number of non zero values in autocorrelation one needs for", "axis=-1): \"\"\"Compute autocorrelation of x along the given axis. This compute the biased", "computation of the autocorrelation is faster: use levinson and correlate in this case.\"\"\"", "= lfilter([1., -alpha], 1, signal) if sr > new_sr: proc = librosa.resample(proc, sr,", "with respect to a ( is the complex conjugate). 
Using the special symmetry", "[] for j, f in enumerate(frqs): if f < 50: continue if f", "if sr > new_sr: proc = librosa.resample(proc, sr, new_sr) nperseg = int(win_len *", "= np.exp(-2 * np.pi * 50 * (1 / new_sr)) proc = lfilter([1.,", "int(win_len * new_sr) nperstep = int(time_step * new_sr) if window_shape == 'gaussian': window", "directly is for speed issue.\"\"\" if not np.isrealobj(x): raise ValueError(\"Complex input not supported", "toeplitz matrix, which is really slow\"\"\" if signal.ndim > 1: raise ValueError(\"Array of", "items only). Generally the autocorrelation of the signal for linear prediction coefficients estimation.", "* new_sr) nperstep = int(time_step * new_sr) if window_shape == 'gaussian': window =", "new_sr) nperstep = int(time_step * new_sr) if window_shape == 'gaussian': window = gaussian(nperseg", "begin, padding, duration) def file_to_formants(file_path, num_formants, max_freq, win_len, time_step): sig, sr = librosa.load(file_path,", "i in range(num_frames): if nperseg % 2 != 0: X = proc[indices[i] -", ":maxlag+1] / x.shape[-1] #@jit def acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation of x along the", "num_formants, new_sr): X = X * window A, e, k = lpc(X, num_formants*2)", "= order + 1 r = np.zeros(p, 'float32') # Number of non zero", "for j in range(1, i): acc += a[j] * r[i-j] k[i-1] = -acc", "+= a[j] * r[i-j] k[i-1] = -acc / e a[i] = k[i-1] for", "enumerate(frqs): if f < 50: continue if f > max_freq - 50: continue", "x along the given axis. This compute the biased autocorrelation estimator (divided by", "x.shape[axis] nfft = int(2 ** nextpow2(2 * maxlag - 1)) if axis !=", "+ 1 LPC coefficients for the signal. c = lpc(x, k) will find", "prediction error. k : array-like reflection coefficients. Notes ----- This uses Levinson-Durbin recursion", "_ _ -R[1] = R[0] R[1] ... 
R[p-1] a[1] : : : :", "process_frame(X, window, num_formants, new_sr): X = X * window A, e, k =", "from scipy.signal import lfilter from scipy.fftpack import fft,ifft from scipy.signal import gaussian from", "on empty array !\") elif order > n - 1: raise ValueError(\"Order should", "acc = r[i] for j in range(1, i): acc += a[j] * r[i-j]", "xp[n] = -c[1] * x[n-2] - ... - c[k-1] * x[n-k-1] Such as", "new_sr): X = X * window A, e, k = lpc(X, num_formants*2) rts", "/ (2 * np.pi)) frq_inds = np.argsort(frqs) frqs = frqs[frq_inds] bw = -1", "= acorr_lpc(signal, axis) return levinson_1d(r, order) def process_frame(X, window, num_formants, new_sr): X =", "-c[1] * x[n-2] - ... - c[k-1] * x[n-k-1] Such as the sum", "k #@jit def _acorr_last_axis(x, nfft, maxlag): a = np.real(ifft(np.abs(fft(x, n = nfft) **", "signal for linear prediction coefficients estimation. The first item must be a non", "n - 1: raise ValueError(\"Order should be <= size-1\") if not np.isreal(r[0]): raise", "order > signal.size: raise ValueError(\"Input signal must have a lenght >= lpc order\")", "bw = process_frame(X, window, num_formants, new_sr) formants = [] for j, f in", "matrix, the inversion can be done in O(p^2) instead of O(p^3). \"\"\" r", "the matrix, the inversion can be done in O(p^2) instead of O(p^3). \"\"\"", "frqs[frq_inds] bw = -1 / 2 * (new_sr / (2 * np.pi)) *", "if order << signal size, direct computation of the autocorrelation is faster: use", "np.pi)) * np.log(np.abs(rts[frq_inds])) return frqs, bw def lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len,", "2), proc.shape[0] - int(nperseg / 2) + 1, nperstep) num_frames = len(indices) for", "order << signal size, direct computation of the autocorrelation is faster: use levinson", "- 1)) if axis != -1: x = np.swapaxes(x, -1, axis) a =", "padding, duration) def file_to_formants(file_path, num_formants, max_freq, win_len, time_step): sig, sr = librosa.load(file_path, sr=None,", "done in O(p^2) instead of O(p^3). 
\"\"\" r = np.atleast_1d(r) if r.ndim >", "\"\"\"Compute autocorrelation of x along the given axis. This compute the biased autocorrelation", "well-known algorithm to solve the Hermitian toeplitz equation: _ _ -R[1] = R[0]", "inversion, and fft for the autocorrelation computation. For small order, particularly if order", "inversion of the toeplitz matrix, which is really slow\"\"\" if signal.ndim > 1:", "order + 1 r = np.zeros(p, 'float32') # Number of non zero values", "lfilter from scipy.fftpack import fft,ifft from scipy.signal import gaussian from ..helper import fix_time_points,", "toeplitz equation: _ _ -R[1] = R[0] R[1] ... R[p-1] a[1] : :", "output def signal_to_formants(signal, sr, num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025, begin=None, padding=None): output = lpc_formants(signal,", "2) + 1] else: X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg", "= gaussian(nperseg + 2, 0.45 * (nperseg - 1) / 2)[1:nperseg + 1]", "np.empty(order+1, 'float32') # temporary array t = np.empty(order+1, 'float32') # Reflection coefficients k", "acorr directly is for speed issue.\"\"\" if not np.isrealobj(x): raise ValueError(\"Complex input not", "axis) return a #@jit def lpc(signal, order, axis=-1): \"\"\"Compute the Linear Prediction Coefficients.", "recursion, to efficiently solve symmetric linear systems with toeplitz structure. Parameters --------- r", "0\") # Estimated coefficients a = np.empty(order+1, 'float32') # temporary array t =", "* (nperseg - 1) / 2)[1:nperseg + 1] else: window = np.hanning(nperseg +", "= xp[i] - x[i] is minimized. 
Parameters ---------- signal: array_like input signal order", "1)) if axis != -1: x = np.swapaxes(x, -1, axis) a = _acorr_last_axis(x,", "* np.log(np.abs(rts[frq_inds])) return frqs, bw def lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape", "This compute the biased autocorrelation estimator (divided by the size of input signal)", "x = np.swapaxes(x, -1, axis) a = _acorr_last_axis(x, nfft, maxlag) if axis !=", "e, k = lpc(X, num_formants*2) rts = np.roots(A) rts = rts[np.where(np.imag(rts) >= 0)]", "= 'gaussian'): output = {} new_sr = 2 * max_freq alpha = np.exp(-2", "= np.min([p, signal.size]) x = np.correlate(signal, signal, 'full') r[:nx] = x[signal.size-1:signal.size+order] phi =", "= -acc / e a[i] = k[i-1] for j in range(order): t[j] =", "biased autocorrelation estimator (divided by the size of input signal) Notes ----- The", "which is really slow\"\"\" if signal.ndim > 1: raise ValueError(\"Array of rank >", "Linear Prediction Coefficients. Return the order + 1 LPC coefficients for the signal.", "why we do not use acorr directly is for speed issue.\"\"\" if not", "nperseg % 2 != 0: X = proc[indices[i] - int(nperseg / 2):indices[i] +", "missing: formants += [(None, None)] * missing output[indices[i] / new_sr] = formants return", "(the output will have order + 1 items) Returns ------- a : array-like", "1: raise ValueError(\"Order should be <= size-1\") if not np.isreal(r[0]): raise ValueError(\"First item", "input must be real.\") elif not np.isfinite(1/r[0]): raise ValueError(\"First item should be !=", "window, num_formants, new_sr): X = X * window A, e, k = lpc(X,", "int LPC order (the output will have order + 1 items) Notes ----", "for linear prediction coefficients estimation. The first item must be a non zero", "is the complex conjugate). 
Using the special symmetry in the matrix, the inversion", "numpy as np import scipy as sp from scipy.signal import lfilter from scipy.fftpack", "* (new_sr / (2 * np.pi)) frq_inds = np.argsort(frqs) frqs = frqs[frq_inds] bw", "* : : : : _ * : -R[p] = R[p-1] R[p-2] ...", "a #@jit def lpc(signal, order, axis=-1): \"\"\"Compute the Linear Prediction Coefficients. Return the", "+ 2, 0.45 * (nperseg - 1) / 2)[1:nperseg + 1] else: window", "-R[p] = R[p-1] R[p-2] ... R[0] a[p] _ with respect to a (", "in range(1, order+1): acc = r[i] for j in range(1, i): acc +=", "to invert (since the matrix is symmetric Toeplitz, the corresponding pxp matrix is", "-1 / 2 * (new_sr / (2 * np.pi)) * np.log(np.abs(rts[frq_inds])) return frqs,", "* new_sr) if window_shape == 'gaussian': window = gaussian(nperseg + 2, 0.45 *", "= librosa.resample(proc, sr, new_sr) nperseg = int(win_len * new_sr) nperstep = int(time_step *", "2)[1:nperseg + 1] else: window = np.hanning(nperseg + 2)[1:nperseg + 1] indices =", "the size of input signal) Notes ----- The reason why we do not", "< 1: raise ValueError(\"Cannot operate on empty array !\") elif order > n", "array-like the prediction error. k : array-like reflection coefficients. Notes ----- This uses", "j in range(1, i): acc += a[j] * r[i-j] k[i-1] = -acc /", "else: X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2)] frqs,", "win_len, time_step): sig, sr = librosa.load(file_path, sr=None, mono=False) output = signal_to_formants(sig, sr, num_formants,", "a[i] = k[i-1] for j in range(order): t[j] = a[j] for j in", "#@jit def acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation of x along the given axis. This", "max_freq, time_step, win_len, window_shape = 'gaussian'): output = {} new_sr = 2 *", "= 1. 
e = r[0] for i in range(1, order+1): acc = r[i]", "= formants return output def signal_to_formants(signal, sr, num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025, begin=None, padding=None):", "\"\"\"Levinson-Durbin recursion, to efficiently solve symmetric linear systems with toeplitz structure. Parameters ---------", "r[0] for i in range(1, order+1): acc = r[i] for j in range(1,", "any serious computation. Use it as educational and reference purpose only. Levinson is", "LPC # coefficients nx = np.min([p, signal.size]) x = np.correlate(signal, signal, 'full') r[:nx]", "signal must have a lenght >= lpc order\") if order > 0: p", "= angz * (new_sr / (2 * np.pi)) frq_inds = np.argsort(frqs) frqs =", "must be real.\") elif not np.isfinite(1/r[0]): raise ValueError(\"First item should be != 0\")", "/ 2)[1:nperseg + 1] else: window = np.hanning(nperseg + 2)[1:nperseg + 1] indices", "frqs, bw = process_frame(X, window, num_formants, new_sr) formants = [] for j, f", "np.roots(A) rts = rts[np.where(np.imag(rts) >= 0)] angz = np.arctan2(np.imag(rts), np.real(rts)) frqs = angz", "raise ValueError(\"Array of rank > 1 not supported yet\") if order > signal.size:", "Parameters --------- r : array-like input array to invert (since the matrix is", "signal, 'full') r[:nx] = x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.], phi)) else:", "= x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.], phi)) else: return np.ones(1, dtype", "np.isrealobj(x): raise ValueError(\"Complex input not supported yet\") maxlag = x.shape[axis] nfft = int(2", "/ (2 * np.pi)) * np.log(np.abs(rts[frq_inds])) return frqs, bw def lpc_formants(signal, sr, num_formants,", "int(time_step * new_sr) if window_shape == 'gaussian': window = gaussian(nperseg + 2, 0.45", "Use it as educational and reference purpose only. 
Levinson is a well-known algorithm", "continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing = num_formants - len(formants) if missing: formants += [(None,", "int(2 ** nextpow2(2 * maxlag - 1)) if axis != -1: x =", "= [] for j, f in enumerate(frqs): if f < 50: continue if", "range(1, i): acc += a[j] * r[i-j] k[i-1] = -acc / e a[i]", "range(1, i): a[j] += k[i-1] * np.conj(t[i-j]) e *= 1 - k[i-1] *", "'float32') #@jit def levinson_1d(r, order): \"\"\"Levinson-Durbin recursion, to efficiently solve symmetric linear systems", "reference purpose only. Levinson is a well-known algorithm to solve the Hermitian toeplitz", "import scipy as sp from scipy.signal import lfilter from scipy.fftpack import fft,ifft from", "/ 2) + 1, nperstep) num_frames = len(indices) for i in range(num_frames): if", "[(None, None)] * missing output[indices[i] / new_sr] = formants return output def signal_to_formants(signal,", "i): acc += a[j] * r[i-j] k[i-1] = -acc / e a[i] =", "we do not use acorr directly is for speed issue.\"\"\" if not np.isrealobj(x):", "fix_time_points(output, begin, padding, duration) def file_to_formants(file_path, num_formants, max_freq, win_len, time_step): sig, sr =", "50: continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing = num_formants - len(formants) if missing: formants +=", "= -1 / 2 * (new_sr / (2 * np.pi)) * np.log(np.abs(rts[frq_inds])) return", "order+1): acc = r[i] for j in range(1, i): acc += a[j] *", "time_step=0.01, win_len=0.025, begin=None, padding=None): output = lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape='gaussian')", "/ e a[i] = k[i-1] for j in range(order): t[j] = a[j] for", "acorr_lpc(signal, axis) return levinson_1d(r, order) def process_frame(X, window, num_formants, new_sr): X = X", "formants += [(None, None)] * missing output[indices[i] / new_sr] = formants return output", "1] indices = np.arange(int(nperseg / 2), proc.shape[0] - int(nperseg / 2) + 1,", "not 
np.isreal(r[0]): raise ValueError(\"First item of input must be real.\") elif not np.isfinite(1/r[0]):", "Generally the autocorrelation of the signal for linear prediction coefficients estimation. The first", "= np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.], phi)) else: return np.ones(1, dtype = 'float32') #@jit", "have order + 1 items) Notes ---- This is just for reference, as", "inversion can be done in O(p^2) instead of O(p^3). \"\"\" r = np.atleast_1d(r)", "(1 / new_sr)) proc = lfilter([1., -alpha], 1, signal) if sr > new_sr:", "-1, axis) return a #@jit def lpc(signal, order, axis=-1): \"\"\"Compute the Linear Prediction", "must be a non zero real. Notes ---- This implementation is in python,", "+ 1 items) Returns ------- a : array-like the solution of the inversion.", "of the inversion. e : array-like the prediction error. k : array-like reflection", "if order > n: raise ValueError(\"Input signal must have length >= order\") r", "= num_formants - len(formants) if missing: formants += [(None, None)] * missing output[indices[i]", "time_step): sig, sr = librosa.load(file_path, sr=None, mono=False) output = signal_to_formants(sig, sr, num_formants, max_freq,", "> 1 not supported yet\") if order > signal.size: raise ValueError(\"Input signal must", "uses Levinson-Durbin recursion for the autocorrelation matrix inversion, and fft for the autocorrelation", "max_freq=5000, time_step=0.01, win_len=0.025, begin=None, padding=None): output = lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len,", "new_sr) if window_shape == 'gaussian': window = gaussian(nperseg + 2, 0.45 * (nperseg", "axis. 
This compute the biased autocorrelation estimator (divided by the size of input", "levinson_1d(r, order): \"\"\"Levinson-Durbin recursion, to efficiently solve symmetric linear systems with toeplitz structure.", "(new_sr / (2 * np.pi)) frq_inds = np.argsort(frqs) frqs = frqs[frq_inds] bw =", "This uses Levinson-Durbin recursion for the autocorrelation matrix inversion, and fft for the", "> new_sr: proc = librosa.resample(proc, sr, new_sr) nperseg = int(win_len * new_sr) nperstep", "special symmetry in the matrix, the inversion can be done in O(p^2) instead", "if order > 0: p = order + 1 r = np.zeros(p, 'float32')", "#@jit def _acorr_last_axis(x, nfft, maxlag): a = np.real(ifft(np.abs(fft(x, n = nfft) ** 2)))", "in O(p^2) instead of O(p^3). \"\"\" r = np.atleast_1d(r) if r.ndim > 1:", "return a, e, k #@jit def _acorr_last_axis(x, nfft, maxlag): a = np.real(ifft(np.abs(fft(x, n", "ValueError(\"Input signal must have length >= order\") r = acorr_lpc(signal, axis) return levinson_1d(r,", "- 1: raise ValueError(\"Order should be <= size-1\") if not np.isreal(r[0]): raise ValueError(\"First", "algorithm to solve the Hermitian toeplitz equation: _ _ -R[1] = R[0] R[1]", "-1, axis) a = _acorr_last_axis(x, nfft, maxlag) if axis != -1: a =", "from ..helper import fix_time_points, nextpow2 def lpc_ref(signal, order): \"\"\"Compute the Linear Prediction Coefficients.", "= {} new_sr = 2 * max_freq alpha = np.exp(-2 * np.pi *", "f > max_freq - 50: continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing = num_formants - len(formants)", "r : array-like input array to invert (since the matrix is symmetric Toeplitz,", "2)] frqs, bw = process_frame(X, window, num_formants, new_sr) formants = [] for j,", "the signal for linear prediction coefficients estimation. The first item must be a", "R[p-1] a[1] : : : : * : : : : _ *", "order + 1 LPC coefficients for the signal. 
c = lpc(x, k) will", "fft,ifft from scipy.signal import gaussian from ..helper import fix_time_points, nextpow2 def lpc_ref(signal, order):", "xp[i] - x[i] is minimized. Parameters ---------- signal: array_like input signal order :", "50: continue if f > max_freq - 50: continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing =", "lenght >= lpc order\") if order > 0: p = order + 1", "new_sr)) proc = lfilter([1., -alpha], 1, signal) if sr > new_sr: proc =", "must have length >= order\") r = acorr_lpc(signal, axis) return levinson_1d(r, order) def", "a = _acorr_last_axis(x, nfft, maxlag) if axis != -1: a = np.swapaxes(a, -1,", "matrix is symmetric Toeplitz, the corresponding pxp matrix is defined by p items", "nextpow2 def lpc_ref(signal, order): \"\"\"Compute the Linear Prediction Coefficients. Return the order +", "item of input must be real.\") elif not np.isfinite(1/r[0]): raise ValueError(\"First item should", "not np.isfinite(1/r[0]): raise ValueError(\"First item should be != 0\") # Estimated coefficients a", "length >= order\") r = acorr_lpc(signal, axis) return levinson_1d(r, order) def process_frame(X, window,", "is symmetric Toeplitz, the corresponding pxp matrix is defined by p items only).", "'float32') a[0] = 1. e = r[0] for i in range(1, order+1): acc", "of input must be real.\") elif not np.isfinite(1/r[0]): raise ValueError(\"First item should be", "Such as the sum of the squared-error e[i] = xp[i] - x[i] is", "of x along the given axis. This compute the biased autocorrelation estimator (divided", "output[indices[i] / new_sr] = formants return output def signal_to_formants(signal, sr, num_formants=5, max_freq=5000, time_step=0.01,", "as educational and reference purpose only. Levinson is a well-known algorithm to solve", "the inversion. e : array-like the prediction error. 
k : array-like reflection coefficients.", "----- The reason why we do not use acorr directly is for speed", "> n - 1: raise ValueError(\"Order should be <= size-1\") if not np.isreal(r[0]):", "< 50: continue if f > max_freq - 50: continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing", "formants = [] for j, f in enumerate(frqs): if f < 50: continue", "input signal order : int LPC order (the output will have order +", "+ 1, nperstep) num_frames = len(indices) for i in range(num_frames): if nperseg %", "if nperseg % 2 != 0: X = proc[indices[i] - int(nperseg / 2):indices[i]", "= signal.shape[axis] if order > n: raise ValueError(\"Input signal must have length >=", "for the signal. c = lpc(x, k) will find the k+1 coefficients of", "> 0: p = order + 1 r = np.zeros(p, 'float32') # Number", "the signal. c = lpc(x, k) will find the k+1 coefficients of a", "= np.empty(order, 'float32') a[0] = 1. e = r[0] for i in range(1,", "r[i-j] k[i-1] = -acc / e a[i] = k[i-1] for j in range(order):", "np.swapaxes(x, -1, axis) a = _acorr_last_axis(x, nfft, maxlag) if axis != -1: a", "order : int LPC order (the output will have order + 1 items)", "<< signal size, direct computation of the autocorrelation is faster: use levinson and", "use levinson and correlate in this case.\"\"\" n = signal.shape[axis] if order >", "= proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2) + 1] else:", "k[i-1] * np.conj(k[i-1]) return a, e, k #@jit def _acorr_last_axis(x, nfft, maxlag): a", "real.\") elif not np.isfinite(1/r[0]): raise ValueError(\"First item should be != 0\") # Estimated", "this case.\"\"\" n = signal.shape[axis] if order > n: raise ValueError(\"Input signal must", "nfft, maxlag): a = np.real(ifft(np.abs(fft(x, n = nfft) ** 2))) return a[..., :maxlag+1]", "% 2 != 0: X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg", "1 r = np.zeros(p, 'float32') # Number of non zero values in autocorrelation", "for j, f in enumerate(frqs): if f < 50: continue if f 
>", "= X * window A, e, k = lpc(X, num_formants*2) rts = np.roots(A)", "2)[1:nperseg + 1] indices = np.arange(int(nperseg / 2), proc.shape[0] - int(nperseg / 2)", "1, nperstep) num_frames = len(indices) for i in range(num_frames): if nperseg % 2", "array-like input array to invert (since the matrix is symmetric Toeplitz, the corresponding", "the order + 1 LPC coefficients for the signal. c = lpc(x, k)", "---- This is just for reference, as it is using the direct inversion", "autocorrelation of x along the given axis. This compute the biased autocorrelation estimator", "not use acorr directly is for speed issue.\"\"\" if not np.isrealobj(x): raise ValueError(\"Complex", "np.argsort(frqs) frqs = frqs[frq_inds] bw = -1 / 2 * (new_sr / (2", "num_formants, new_sr) formants = [] for j, f in enumerate(frqs): if f <", "lpc(X, num_formants*2) rts = np.roots(A) rts = rts[np.where(np.imag(rts) >= 0)] angz = np.arctan2(np.imag(rts),", "n = nfft) ** 2))) return a[..., :maxlag+1] / x.shape[-1] #@jit def acorr_lpc(x,", "is just for reference, as it is using the direct inversion of the", "phi)) else: return np.ones(1, dtype = 'float32') #@jit def levinson_1d(r, order): \"\"\"Levinson-Durbin recursion,", "/ 2), proc.shape[0] - int(nperseg / 2) + 1, nperstep) num_frames = len(indices)", "p = order + 1 r = np.zeros(p, 'float32') # Number of non", "> 1: raise ValueError(\"Array of rank > 1 not supported yet\") if order", "> 1: raise ValueError(\"Only rank 1 are supported for now.\") n = r.size", "* window A, e, k = lpc(X, num_formants*2) rts = np.roots(A) rts =", "num_formants, max_freq, win_len, time_step): sig, sr = librosa.load(file_path, sr=None, mono=False) output = signal_to_formants(sig,", "r = np.zeros(p, 'float32') # Number of non zero values in autocorrelation one", "linear prediction coefficients estimation. 
The first item must be a non zero real.", "max_freq - 50: continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing = num_formants - len(formants) if missing:", ": array-like the solution of the inversion. e : array-like the prediction error.", "2) + 1, nperstep) num_frames = len(indices) for i in range(num_frames): if nperseg", "filter: xp[n] = -c[1] * x[n-2] - ... - c[k-1] * x[n-k-1] Such", "as np import scipy as sp from scipy.signal import lfilter from scipy.fftpack import", "2 * max_freq alpha = np.exp(-2 * np.pi * 50 * (1 /", "frqs = angz * (new_sr / (2 * np.pi)) frq_inds = np.argsort(frqs) frqs", "array t = np.empty(order+1, 'float32') # Reflection coefficients k = np.empty(order, 'float32') a[0]", "---- This implementation is in python, hence unsuitable for any serious computation. Use", "signal.ndim > 1: raise ValueError(\"Array of rank > 1 not supported yet\") if", "/ 2):indices[i] + int(nperseg / 2) + 1] else: X = proc[indices[i] -", ": : _ * : -R[p] = R[p-1] R[p-2] ... R[0] a[p] _", "O(p^2) instead of O(p^3). \"\"\" r = np.atleast_1d(r) if r.ndim > 1: raise", "process_frame(X, window, num_formants, new_sr) formants = [] for j, f in enumerate(frqs): if", "1 not supported yet\") if order > signal.size: raise ValueError(\"Input signal must have", "for the autocorrelation computation. For small order, particularly if order << signal size,", "np.min([p, signal.size]) x = np.correlate(signal, signal, 'full') r[:nx] = x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])),", "= -c[1] * x[n-2] - ... - c[k-1] * x[n-k-1] Such as the", "yet\") if order > signal.size: raise ValueError(\"Input signal must have a lenght >=", "/ 2):indices[i] + int(nperseg / 2)] frqs, bw = process_frame(X, window, num_formants, new_sr)", "axis) a = _acorr_last_axis(x, nfft, maxlag) if axis != -1: a = np.swapaxes(a,", "LPC order (the output will have order + 1 items) Notes ---- This", "systems with toeplitz structure. 
Parameters --------- r : array-like input array to invert", "as sp from scipy.signal import lfilter from scipy.fftpack import fft,ifft from scipy.signal import", "order linear filter: xp[n] = -c[1] * x[n-2] - ... - c[k-1] *", "-acc / e a[i] = k[i-1] for j in range(order): t[j] = a[j]", "educational and reference purpose only. Levinson is a well-known algorithm to solve the", "librosa import numpy as np import scipy as sp from scipy.signal import lfilter", "serious computation. Use it as educational and reference purpose only. Levinson is a", "(divided by the size of input signal) Notes ----- The reason why we", "is minimized. Parameters ---------- signal: array_like input signal order : int LPC order", "np.concatenate(([1.], phi)) else: return np.ones(1, dtype = 'float32') #@jit def levinson_1d(r, order): \"\"\"Levinson-Durbin", "r[i] for j in range(1, i): acc += a[j] * r[i-j] k[i-1] =", "i): a[j] += k[i-1] * np.conj(t[i-j]) e *= 1 - k[i-1] * np.conj(k[i-1])", "- int(nperseg / 2) + 1, nperstep) num_frames = len(indices) for i in", ": : : _ * : -R[p] = R[p-1] R[p-2] ... R[0] a[p]", "r[:nx] = x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.], phi)) else: return np.ones(1,", ": int LPC order (the output will have order + 1 items) Returns", "... R[0] a[p] _ with respect to a ( is the complex conjugate).", "in range(1, i): acc += a[j] * r[i-j] k[i-1] = -acc / e", "A, e, k = lpc(X, num_formants*2) rts = np.roots(A) rts = rts[np.where(np.imag(rts) >=", "in python, hence unsuitable for any serious computation. 
Use it as educational and", "order, particularly if order << signal size, direct computation of the autocorrelation is", "'gaussian'): output = {} new_sr = 2 * max_freq alpha = np.exp(-2 *", ": array-like input array to invert (since the matrix is symmetric Toeplitz, the", "n = signal.shape[axis] if order > n: raise ValueError(\"Input signal must have length", "rts = np.roots(A) rts = rts[np.where(np.imag(rts) >= 0)] angz = np.arctan2(np.imag(rts), np.real(rts)) frqs", "continue if f > max_freq - 50: continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing = num_formants", "real. Notes ---- This implementation is in python, hence unsuitable for any serious", "t[j] = a[j] for j in range(1, i): a[j] += k[i-1] * np.conj(t[i-j])", "order): \"\"\"Compute the Linear Prediction Coefficients. Return the order + 1 LPC coefficients", "it as educational and reference purpose only. Levinson is a well-known algorithm to", "items) Notes ---- This is just for reference, as it is using the", "the autocorrelation of the signal for linear prediction coefficients estimation. The first item", "for now.\") n = r.size if n < 1: raise ValueError(\"Cannot operate on", "if f > max_freq - 50: continue formants.append((np.asscalar(f), np.asscalar(bw[j]))) missing = num_formants -", "dtype = 'float32') #@jit def levinson_1d(r, order): \"\"\"Levinson-Durbin recursion, to efficiently solve symmetric", "the Hermitian toeplitz equation: _ _ -R[1] = R[0] R[1] ... R[p-1] a[1]", "new_sr) formants = [] for j, f in enumerate(frqs): if f < 50:", "raise ValueError(\"First item of input must be real.\") elif not np.isfinite(1/r[0]): raise ValueError(\"First", "50 * (1 / new_sr)) proc = lfilter([1., -alpha], 1, signal) if sr", "lpc order\") if order > 0: p = order + 1 r =", "k[i-1] = -acc / e a[i] = k[i-1] for j in range(order): t[j]", "array-like the solution of the inversion. e : array-like the prediction error. k", "the prediction error. k : array-like reflection coefficients. 
Notes ----- This uses Levinson-Durbin", "int(nperseg / 2):indices[i] + int(nperseg / 2)] frqs, bw = process_frame(X, window, num_formants,", "-1: x = np.swapaxes(x, -1, axis) a = _acorr_last_axis(x, nfft, maxlag) if axis", "signal order : int LPC order (the output will have order + 1", "np.correlate(signal, signal, 'full') r[:nx] = x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.], phi))", "the toeplitz matrix, which is really slow\"\"\" if signal.ndim > 1: raise ValueError(\"Array", "coefficients k = np.empty(order, 'float32') a[0] = 1. e = r[0] for i", "size-1\") if not np.isreal(r[0]): raise ValueError(\"First item of input must be real.\") elif", "estimator (divided by the size of input signal) Notes ----- The reason why", "time_step, win_len, window_shape = 'gaussian'): output = {} new_sr = 2 * max_freq", "= int(win_len * new_sr) nperstep = int(time_step * new_sr) if window_shape == 'gaussian':", "output will have order + 1 items) Returns ------- a : array-like the", "estimation. The first item must be a non zero real. Notes ---- This", "+= k[i-1] * np.conj(t[i-j]) e *= 1 - k[i-1] * np.conj(k[i-1]) return a,", "compute the biased autocorrelation estimator (divided by the size of input signal) Notes", "import fix_time_points, nextpow2 def lpc_ref(signal, order): \"\"\"Compute the Linear Prediction Coefficients. Return the", "find the k+1 coefficients of a k order linear filter: xp[n] = -c[1]", "yet\") maxlag = x.shape[axis] nfft = int(2 ** nextpow2(2 * maxlag - 1))", "* missing output[indices[i] / new_sr] = formants return output def signal_to_formants(signal, sr, num_formants=5,", "operate on empty array !\") elif order > n - 1: raise ValueError(\"Order", "signal.size]) x = np.correlate(signal, signal, 'full') r[:nx] = x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:])", "p items only). 
Generally the autocorrelation of the signal for linear prediction coefficients", "nfft) ** 2))) return a[..., :maxlag+1] / x.shape[-1] #@jit def acorr_lpc(x, axis=-1): \"\"\"Compute", "lfilter([1., -alpha], 1, signal) if sr > new_sr: proc = librosa.resample(proc, sr, new_sr)", "for p LPC # coefficients nx = np.min([p, signal.size]) x = np.correlate(signal, signal,", "coefficients estimation. The first item must be a non zero real. Notes ----", "for the autocorrelation matrix inversion, and fft for the autocorrelation computation. For small", "rank 1 are supported for now.\") n = r.size if n < 1:", "scipy.signal import gaussian from ..helper import fix_time_points, nextpow2 def lpc_ref(signal, order): \"\"\"Compute the", "order\") if order > 0: p = order + 1 r = np.zeros(p,", "is a well-known algorithm to solve the Hermitian toeplitz equation: _ _ -R[1]", "a[1] : : : : * : : : : _ * :", "LPC order (the output will have order + 1 items) Returns ------- a", "range(1, order+1): acc = r[i] for j in range(1, i): acc += a[j]", "x[n-k-1] Such as the sum of the squared-error e[i] = xp[i] - x[i]", "... - c[k-1] * x[n-k-1] Such as the sum of the squared-error e[i]", "hence unsuitable for any serious computation. Use it as educational and reference purpose", "window_shape = 'gaussian'): output = {} new_sr = 2 * max_freq alpha =", "scipy.fftpack import fft,ifft from scipy.signal import gaussian from ..helper import fix_time_points, nextpow2 def", "direct inversion of the toeplitz matrix, which is really slow\"\"\" if signal.ndim >", "signal.size: raise ValueError(\"Input signal must have a lenght >= lpc order\") if order", "np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.], phi)) else: return np.ones(1, dtype = 'float32') #@jit def", "array-like reflection coefficients. 
Notes ----- This uses Levinson-Durbin recursion for the autocorrelation matrix", ">= 0)] angz = np.arctan2(np.imag(rts), np.real(rts)) frqs = angz * (new_sr / (2", "if order > signal.size: raise ValueError(\"Input signal must have a lenght >= lpc", "np.pi)) frq_inds = np.argsort(frqs) frqs = frqs[frq_inds] bw = -1 / 2 *", "coefficients for the signal. c = lpc(x, k) will find the k+1 coefficients", "= np.empty(order+1, 'float32') # Reflection coefficients k = np.empty(order, 'float32') a[0] = 1.", "j, f in enumerate(frqs): if f < 50: continue if f > max_freq", "sr, num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025, begin=None, padding=None): output = lpc_formants(signal, sr, num_formants, max_freq,", "_ * : -R[p] = R[p-1] R[p-2] ... R[0] a[p] _ with respect", "nfft = int(2 ** nextpow2(2 * maxlag - 1)) if axis != -1:", "e, k #@jit def _acorr_last_axis(x, nfft, maxlag): a = np.real(ifft(np.abs(fft(x, n = nfft)", "0: p = order + 1 r = np.zeros(p, 'float32') # Number of", "This implementation is in python, hence unsuitable for any serious computation. Use it", "a = np.swapaxes(a, -1, axis) return a #@jit def lpc(signal, order, axis=-1): \"\"\"Compute", "ValueError(\"First item should be != 0\") # Estimated coefficients a = np.empty(order+1, 'float32')", "proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2)] frqs, bw = process_frame(X,", "* x[n-2] - ... 
- c[k-1] * x[n-k-1] Such as the sum of", "begin=None, padding=None): output = lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape='gaussian') duration =", "order + 1 items) Notes ---- This is just for reference, as it", "x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.], phi)) else: return np.ones(1, dtype =", "* (1 / new_sr)) proc = lfilter([1., -alpha], 1, signal) if sr >", "Levinson is a well-known algorithm to solve the Hermitian toeplitz equation: _ _", "-r[1:]) return np.concatenate(([1.], phi)) else: return np.ones(1, dtype = 'float32') #@jit def levinson_1d(r,", "= np.roots(A) rts = rts[np.where(np.imag(rts) >= 0)] angz = np.arctan2(np.imag(rts), np.real(rts)) frqs =", "= rts[np.where(np.imag(rts) >= 0)] angz = np.arctan2(np.imag(rts), np.real(rts)) frqs = angz * (new_sr", "= np.correlate(signal, signal, 'full') r[:nx] = x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return np.concatenate(([1.],", "# Number of non zero values in autocorrelation one needs for p LPC", "output = lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape='gaussian') duration = signal.shape[0] /", "bw def lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape = 'gaussian'): output =", "of non zero values in autocorrelation one needs for p LPC # coefficients", "as the sum of the squared-error e[i] = xp[i] - x[i] is minimized.", "+ 1 items) Notes ---- This is just for reference, as it is", "For small order, particularly if order << signal size, direct computation of the", "win_len, window_shape = 'gaussian'): output = {} new_sr = 2 * max_freq alpha", "\"\"\"Compute the Linear Prediction Coefficients. 
Return the order + 1 LPC coefficients for", "int(nperseg / 2):indices[i] + int(nperseg / 2) + 1] else: X = proc[indices[i]", "# coefficients nx = np.min([p, signal.size]) x = np.correlate(signal, signal, 'full') r[:nx] =", "faster: use levinson and correlate in this case.\"\"\" n = signal.shape[axis] if order", "== 'gaussian': window = gaussian(nperseg + 2, 0.45 * (nperseg - 1) /", "\"\"\" r = np.atleast_1d(r) if r.ndim > 1: raise ValueError(\"Only rank 1 are", "k[i-1] for j in range(order): t[j] = a[j] for j in range(1, i):", "sr, new_sr) nperseg = int(win_len * new_sr) nperstep = int(time_step * new_sr) if", "sum of the squared-error e[i] = xp[i] - x[i] is minimized. Parameters ----------", "angz = np.arctan2(np.imag(rts), np.real(rts)) frqs = angz * (new_sr / (2 * np.pi))", "levinson and correlate in this case.\"\"\" n = signal.shape[axis] if order > n:", "in this case.\"\"\" n = signal.shape[axis] if order > n: raise ValueError(\"Input signal", "window A, e, k = lpc(X, num_formants*2) rts = np.roots(A) rts = rts[np.where(np.imag(rts)", "lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape='gaussian') duration = signal.shape[0] / sr return", "librosa.load(file_path, sr=None, mono=False) output = signal_to_formants(sig, sr, num_formants, max_freq, win_len, time_step) return output", "scipy.signal import lfilter from scipy.fftpack import fft,ifft from scipy.signal import gaussian from ..helper", "of the signal for linear prediction coefficients estimation. The first item must be", "solve symmetric linear systems with toeplitz structure. Parameters --------- r : array-like input", "1) / 2)[1:nperseg + 1] else: window = np.hanning(nperseg + 2)[1:nperseg + 1]", "matrix is defined by p items only). 
Generally the autocorrelation of the signal", "order (the output will have order + 1 items) Notes ---- This is", "> n: raise ValueError(\"Input signal must have length >= order\") r = acorr_lpc(signal,", "0)] angz = np.arctan2(np.imag(rts), np.real(rts)) frqs = angz * (new_sr / (2 *", "win_len=0.025, begin=None, padding=None): output = lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape='gaussian') duration", "> signal.size: raise ValueError(\"Input signal must have a lenght >= lpc order\") if", "x = np.correlate(signal, signal, 'full') r[:nx] = x[signal.size-1:signal.size+order] phi = np.dot(sp.linalg.inv(sp.linalg.toeplitz(r[:-1])), -r[1:]) return", "new_sr = 2 * max_freq alpha = np.exp(-2 * np.pi * 50 *", "a non zero real. Notes ---- This implementation is in python, hence unsuitable", "num_formants, max_freq, time_step, win_len, window_shape = 'gaussian'): output = {} new_sr = 2", "2, 0.45 * (nperseg - 1) / 2)[1:nperseg + 1] else: window =", "temporary array t = np.empty(order+1, 'float32') # Reflection coefficients k = np.empty(order, 'float32')", "fft for the autocorrelation computation. For small order, particularly if order << signal", "not np.isrealobj(x): raise ValueError(\"Complex input not supported yet\") maxlag = x.shape[axis] nfft =", "np.empty(order, 'float32') a[0] = 1. e = r[0] for i in range(1, order+1):", "i in range(1, order+1): acc = r[i] for j in range(1, i): acc", "range(num_frames): if nperseg % 2 != 0: X = proc[indices[i] - int(nperseg /", "e[i] = xp[i] - x[i] is minimized. 
Parameters ---------- signal: array_like input signal", "case.\"\"\" n = signal.shape[axis] if order > n: raise ValueError(\"Input signal must have", "# temporary array t = np.empty(order+1, 'float32') # Reflection coefficients k = np.empty(order,", "return levinson_1d(r, order) def process_frame(X, window, num_formants, new_sr): X = X * window", "scipy as sp from scipy.signal import lfilter from scipy.fftpack import fft,ifft from scipy.signal", "ValueError(\"Array of rank > 1 not supported yet\") if order > signal.size: raise", "= nfft) ** 2))) return a[..., :maxlag+1] / x.shape[-1] #@jit def acorr_lpc(x, axis=-1):", "* np.pi)) * np.log(np.abs(rts[frq_inds])) return frqs, bw def lpc_formants(signal, sr, num_formants, max_freq, time_step,", "k) will find the k+1 coefficients of a k order linear filter: xp[n]", ": int LPC order (the output will have order + 1 items) Notes", "and fft for the autocorrelation computation. For small order, particularly if order <<", "recursion for the autocorrelation matrix inversion, and fft for the autocorrelation computation. For", "np.asscalar(bw[j]))) missing = num_formants - len(formants) if missing: formants += [(None, None)] *", "is defined by p items only). Generally the autocorrelation of the signal for", "the inversion can be done in O(p^2) instead of O(p^3). \"\"\" r =", "solve the Hermitian toeplitz equation: _ _ -R[1] = R[0] R[1] ... R[p-1]", "if not np.isrealobj(x): raise ValueError(\"Complex input not supported yet\") maxlag = x.shape[axis] nfft", "+ 1] indices = np.arange(int(nperseg / 2), proc.shape[0] - int(nperseg / 2) +", "if missing: formants += [(None, None)] * missing output[indices[i] / new_sr] = formants", "inversion. e : array-like the prediction error. k : array-like reflection coefficients. 
Notes", "def file_to_formants(file_path, num_formants, max_freq, win_len, time_step): sig, sr = librosa.load(file_path, sr=None, mono=False) output", "rank > 1 not supported yet\") if order > signal.size: raise ValueError(\"Input signal", "k = lpc(X, num_formants*2) rts = np.roots(A) rts = rts[np.where(np.imag(rts) >= 0)] angz", "non zero real. Notes ---- This implementation is in python, hence unsuitable for", "should be != 0\") # Estimated coefficients a = np.empty(order+1, 'float32') # temporary", "Notes ----- This uses Levinson-Durbin recursion for the autocorrelation matrix inversion, and fft", "computation. Use it as educational and reference purpose only. Levinson is a well-known", "item should be != 0\") # Estimated coefficients a = np.empty(order+1, 'float32') #", "- len(formants) if missing: formants += [(None, None)] * missing output[indices[i] / new_sr]", "in autocorrelation one needs for p LPC # coefficients nx = np.min([p, signal.size])", "Return the order + 1 LPC coefficients for the signal. c = lpc(x,", ": -R[p] = R[p-1] R[p-2] ... R[0] a[p] _ with respect to a", "should be <= size-1\") if not np.isreal(r[0]): raise ValueError(\"First item of input must", "direct computation of the autocorrelation is faster: use levinson and correlate in this", ">= order\") r = acorr_lpc(signal, axis) return levinson_1d(r, order) def process_frame(X, window, num_formants,", "lpc(signal, order, axis=-1): \"\"\"Compute the Linear Prediction Coefficients. 
Return the order + 1", "rts = rts[np.where(np.imag(rts) >= 0)] angz = np.arctan2(np.imag(rts), np.real(rts)) frqs = angz *", "order > n - 1: raise ValueError(\"Order should be <= size-1\") if not", "window = gaussian(nperseg + 2, 0.45 * (nperseg - 1) / 2)[1:nperseg +", "win_len, window_shape='gaussian') duration = signal.shape[0] / sr return fix_time_points(output, begin, padding, duration) def", "--------- r : array-like input array to invert (since the matrix is symmetric", "num_formants=5, max_freq=5000, time_step=0.01, win_len=0.025, begin=None, padding=None): output = lpc_formants(signal, sr, num_formants, max_freq, time_step,", "values in autocorrelation one needs for p LPC # coefficients nx = np.min([p,", "ValueError(\"Only rank 1 are supported for now.\") n = r.size if n <", "ValueError(\"Cannot operate on empty array !\") elif order > n - 1: raise", "= int(time_step * new_sr) if window_shape == 'gaussian': window = gaussian(nperseg + 2,", "(2 * np.pi)) frq_inds = np.argsort(frqs) frqs = frqs[frq_inds] bw = -1 /", "for speed issue.\"\"\" if not np.isrealobj(x): raise ValueError(\"Complex input not supported yet\") maxlag", "..helper import fix_time_points, nextpow2 def lpc_ref(signal, order): \"\"\"Compute the Linear Prediction Coefficients. Return", "---------- signal: array_like input signal order : int LPC order (the output will", "= np.real(ifft(np.abs(fft(x, n = nfft) ** 2))) return a[..., :maxlag+1] / x.shape[-1] #@jit", "the given axis. This compute the biased autocorrelation estimator (divided by the size", "Parameters ---------- signal: array_like input signal order : int LPC order (the output", "only. Levinson is a well-known algorithm to solve the Hermitian toeplitz equation: _", "Using the special symmetry in the matrix, the inversion can be done in", "r = np.atleast_1d(r) if r.ndim > 1: raise ValueError(\"Only rank 1 are supported", "order > 0: p = order + 1 r = np.zeros(p, 'float32') #", "a k order linear filter: xp[n] = -c[1] * x[n-2] - ... 
-", "a[j] += k[i-1] * np.conj(t[i-j]) e *= 1 - k[i-1] * np.conj(k[i-1]) return", "def process_frame(X, window, num_formants, new_sr): X = X * window A, e, k", "signal: array_like input signal order : int LPC order (the output will have", "prediction coefficients estimation. The first item must be a non zero real. Notes", "have order + 1 items) Returns ------- a : array-like the solution of", "!\") elif order > n - 1: raise ValueError(\"Order should be <= size-1\")", "+ int(nperseg / 2) + 1] else: X = proc[indices[i] - int(nperseg /", "missing = num_formants - len(formants) if missing: formants += [(None, None)] * missing", "Notes ---- This implementation is in python, hence unsuitable for any serious computation.", "np.isfinite(1/r[0]): raise ValueError(\"First item should be != 0\") # Estimated coefficients a =", "= len(indices) for i in range(num_frames): if nperseg % 2 != 0: X", "import gaussian from ..helper import fix_time_points, nextpow2 def lpc_ref(signal, order): \"\"\"Compute the Linear", "reflection coefficients. Notes ----- This uses Levinson-Durbin recursion for the autocorrelation matrix inversion,", "'float32') # Reflection coefficients k = np.empty(order, 'float32') a[0] = 1. e =", "be a non zero real. Notes ---- This implementation is in python, hence", "reason why we do not use acorr directly is for speed issue.\"\"\" if", "be done in O(p^2) instead of O(p^3). \"\"\" r = np.atleast_1d(r) if r.ndim", "if f < 50: continue if f > max_freq - 50: continue formants.append((np.asscalar(f),", "raise ValueError(\"Cannot operate on empty array !\") elif order > n - 1:", "int(nperseg / 2) + 1, nperstep) num_frames = len(indices) for i in range(num_frames):", "input not supported yet\") maxlag = x.shape[axis] nfft = int(2 ** nextpow2(2 *", ": array-like the prediction error. k : array-like reflection coefficients. Notes ----- This", "squared-error e[i] = xp[i] - x[i] is minimized. 
Parameters ---------- signal: array_like input", "corresponding pxp matrix is defined by p items only). Generally the autocorrelation of", "import lfilter from scipy.fftpack import fft,ifft from scipy.signal import gaussian from ..helper import", "- int(nperseg / 2):indices[i] + int(nperseg / 2)] frqs, bw = process_frame(X, window,", "( is the complex conjugate). Using the special symmetry in the matrix, the", "nx = np.min([p, signal.size]) x = np.correlate(signal, signal, 'full') r[:nx] = x[signal.size-1:signal.size+order] phi", "_acorr_last_axis(x, nfft, maxlag): a = np.real(ifft(np.abs(fft(x, n = nfft) ** 2))) return a[...,", "= int(2 ** nextpow2(2 * maxlag - 1)) if axis != -1: x", "c[k-1] * x[n-k-1] Such as the sum of the squared-error e[i] = xp[i]", "0.45 * (nperseg - 1) / 2)[1:nperseg + 1] else: window = np.hanning(nperseg", "in enumerate(frqs): if f < 50: continue if f > max_freq - 50:", "= lpc(X, num_formants*2) rts = np.roots(A) rts = rts[np.where(np.imag(rts) >= 0)] angz =", "* x[n-k-1] Such as the sum of the squared-error e[i] = xp[i] -", "Levinson-Durbin recursion for the autocorrelation matrix inversion, and fft for the autocorrelation computation.", "+ 1] else: X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg /", "from scipy.signal import gaussian from ..helper import fix_time_points, nextpow2 def lpc_ref(signal, order): \"\"\"Compute", "the direct inversion of the toeplitz matrix, which is really slow\"\"\" if signal.ndim", "window = np.hanning(nperseg + 2)[1:nperseg + 1] indices = np.arange(int(nperseg / 2), proc.shape[0]", "np.swapaxes(a, -1, axis) return a #@jit def lpc(signal, order, axis=-1): \"\"\"Compute the Linear", "if window_shape == 'gaussian': window = gaussian(nperseg + 2, 0.45 * (nperseg -", "a[j] * r[i-j] k[i-1] = -acc / e a[i] = k[i-1] for j", "not supported yet\") maxlag = x.shape[axis] nfft = int(2 ** nextpow2(2 * maxlag", "levinson_1d(r, order) def process_frame(X, window, num_formants, new_sr): X = X * window A,", 
"now.\") n = r.size if n < 1: raise ValueError(\"Cannot operate on empty", "= frqs[frq_inds] bw = -1 / 2 * (new_sr / (2 * np.pi))", "X = proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2)] frqs, bw", "LPC coefficients for the signal. c = lpc(x, k) will find the k+1", "matrix, which is really slow\"\"\" if signal.ndim > 1: raise ValueError(\"Array of rank", "1. e = r[0] for i in range(1, order+1): acc = r[i] for", "x[n-2] - ... - c[k-1] * x[n-k-1] Such as the sum of the", "in range(num_frames): if nperseg % 2 != 0: X = proc[indices[i] - int(nperseg", "= np.argsort(frqs) frqs = frqs[frq_inds] bw = -1 / 2 * (new_sr /", "int(nperseg / 2) + 1] else: X = proc[indices[i] - int(nperseg / 2):indices[i]", ": : : : * : : : : _ * : -R[p]", "the sum of the squared-error e[i] = xp[i] - x[i] is minimized. Parameters", "if axis != -1: a = np.swapaxes(a, -1, axis) return a #@jit def", "supported yet\") maxlag = x.shape[axis] nfft = int(2 ** nextpow2(2 * maxlag -", "= proc[indices[i] - int(nperseg / 2):indices[i] + int(nperseg / 2)] frqs, bw =", "len(formants) if missing: formants += [(None, None)] * missing output[indices[i] / new_sr] =", "= librosa.load(file_path, sr=None, mono=False) output = signal_to_formants(sig, sr, num_formants, max_freq, win_len, time_step) return", "of the toeplitz matrix, which is really slow\"\"\" if signal.ndim > 1: raise", "def _acorr_last_axis(x, nfft, maxlag): a = np.real(ifft(np.abs(fft(x, n = nfft) ** 2))) return", "def acorr_lpc(x, axis=-1): \"\"\"Compute autocorrelation of x along the given axis. This compute", "- c[k-1] * x[n-k-1] Such as the sum of the squared-error e[i] =", "/ new_sr)) proc = lfilter([1., -alpha], 1, signal) if sr > new_sr: proc", "will have order + 1 items) Returns ------- a : array-like the solution", "def lpc(signal, order, axis=-1): \"\"\"Compute the Linear Prediction Coefficients. 
Return the order +", "Estimated coefficients a = np.empty(order+1, 'float32') # temporary array t = np.empty(order+1, 'float32')", "import librosa import numpy as np import scipy as sp from scipy.signal import", "r = acorr_lpc(signal, axis) return levinson_1d(r, order) def process_frame(X, window, num_formants, new_sr): X", "np.hanning(nperseg + 2)[1:nperseg + 1] indices = np.arange(int(nperseg / 2), proc.shape[0] - int(nperseg", "= lpc_formants(signal, sr, num_formants, max_freq, time_step, win_len, window_shape='gaussian') duration = signal.shape[0] / sr", "indices = np.arange(int(nperseg / 2), proc.shape[0] - int(nperseg / 2) + 1, nperstep)" ]
[ "- {nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: app.run(port=1234) \"\"\" unigoTree = createUniGOTree( backgroundUniColl = uColl, proteinList", "The test this #python -m pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import docopt", "arguments['--verbose']) print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: app.run(port=1234) \"\"\" unigoTree = createUniGOTree( backgroundUniColl", "modified\") print(\"Computing ORA\") deltaUniprotID = expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose']) print(f\"Test", "XML {taxid} may not be registred\") else: unigoTree = createGOTreeFromAPI(resp.text, expUniprotID) x,y =", "= get(url) if resp.status_code == 404: print(f\"{url} returned 404, provided proteome XML {taxid}", "if arguments['cli']: quantProteomic = proteomicWrapper( csv_file = arguments['<proteomicTSV>'], abnd_label = abnd_field) uColl =", "sc\") \"\"\" # Unnecssary def typeGuardTaxID(proteomicData, uColl): taxids = {} for uID in", "print(f\"{url} returned 404, provided proteome XML {taxid} may not be registred\") else: unigoTree", "\"\"\" # Unnecssary def typeGuardTaxID(proteomicData, uColl): taxids = {} for uID in proteomicData.uniprot:", "taxids[uObj.taxid] = 0 taxids[uObj.taxid] += 1 return sorted( [ (k,v) for k,v in", "from docopt import docopt #from pyT2GA import analysis from unigo import Unigo as", "= time.perf_counter() print(f\"Test Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution time is {stop-start} sc\") \"\"\" #", "taxids: taxids[uObj.taxid] = 0 taxids[uObj.taxid] += 1 return sorted( [ (k,v) for k,v", "proteinList = [ x for x in quantProteomic.uniprot ], fetchLatest = False) start", "abnd_field = arguments['--field'] if arguments['--field'] else \"Corrected Abundance ratio (1,526968203)\" nTop = int(arguments['--topScore'])", "be registred\") else: unigoTree = createGOTreeFromAPI(resp.text, expUniprotID) x,y = 
unigoTree.dimensions print(\"Unigo Object successfully", "uColl.taxids[0] apiAdress = arguments['--adress'] if arguments['--adress'] else \"127.0.0.1\" apiPort = arguments['--port'] if arguments['--port']", "in taxids.items() ], key=lambda x:x[1] ) #r = pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes, intg=False,", "fetchLatest = False) start = time.perf_counter() # Taking 10% w/ highest qtty value", "bbb --alpha=alpha ccc --ncore=ncore ddd --sizelim=sizelim eee --prot=<proteomeXML> ggg --adress=<apiAdress> aaa --port=<apiPort> aaa", "_ for _ in quantProteomic[nTop].uniprot ] , verbose = False) stop = time.perf_counter()", "protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available in quantitative records, taking first {nDelta}", "if arguments['api']: app.run(port=1234) \"\"\" unigoTree = createUniGOTree( backgroundUniColl = uColl, proteinList = [", "docopt #from pyT2GA import analysis from unigo import Unigo as createUniGOTree from unigo", "qtty value rankingsORA = unigoTree.computeORA( [ _ for _ in quantProteomic[nTop].uniprot ] ,", "quantitative records, taking first {nDelta} as of quantity modified\") print(\"Computing ORA\") deltaUniprotID =", "quantity modified\") print(\"Computing ORA\") deltaUniprotID = expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose'])", "=> nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree => nodes:{y[0]} children_links:{y[1]}, total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1", "f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation tree from {url}\") expUniprotID = [ _ for _", "may not be registred\") else: unigoTree = createGOTreeFromAPI(resp.text, expUniprotID) x,y = unigoTree.dimensions print(\"Unigo", "key=lambda x:x[1] ) #r = pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes, intg=False, # 
abnd_label =", "dimensions:\") print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree => nodes:{y[0]} children_links:{y[1]}, total_protein_occurences:{y[2]},", "else \"Corrected Abundance ratio (1,526968203)\" nTop = int(arguments['--topScore']) if arguments['--topScore'] else 20 if", "--verbose iiii --topScore=<pathway_number> aaaa \"\"\" # TEST W/ mycoplasma proteome # The test", "import docopt #from pyT2GA import analysis from unigo import Unigo as createUniGOTree from", "taxid = uColl.taxids[0] apiAdress = arguments['--adress'] if arguments['--adress'] else \"127.0.0.1\" apiPort = arguments['--port']", "as createGOTreeFromAPI from .utils import proteomicWrapper from pyproteinsExt.uniprot import EntrySet as createUniprotCollection from", "resp.status_code == 404: print(f\"{url} returned 404, provided proteome XML {taxid} may not be", "abnd_label = abnd_field) uColl = createUniprotCollection(collectionXML = arguments['<proteomeXML>'] ) missingProt = [] for", "verbose = False) stop = time.perf_counter() print(f\"Test Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution time is", "# TEST W/ mycoplasma proteome # The test this #python -m pyqp cli", "createUniGOTree from unigo import uloads as createGOTreeFromAPI from .utils import proteomicWrapper from pyproteinsExt.uniprot", "sorted( [ (k,v) for k,v in taxids.items() ], key=lambda x:x[1] ) #r =", "--field=<quantity column> csv column header featuring signal --purb=purb aa --intg=intg bbb --alpha=alpha ccc", "in quantProteomic.uniprot ] resp = get(url) if resp.status_code == 404: print(f\"{url} returned 404,", "uColl): taxids = {} for uID in proteomicData.uniprot: uObj = uColl.get(uID) if not", "createGOTreeFromAPI(resp.text, expUniprotID) x,y = unigoTree.dimensions print(\"Unigo Object successfully buildt w/ following dimensions:\") print(f\"\\txpTree", "in taxids: taxids[uObj.taxid] = 0 taxids[uObj.taxid] += 1 return sorted( [ (k,v) 
for", "test this #python -m pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import docopt #from", "ratio (1,526968203)\" nTop = int(arguments['--topScore']) if arguments['--topScore'] else 20 if arguments['cli']: quantProteomic =", "Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution time is {stop-start} sc\") \"\"\" # Unnecssary def typeGuardTaxID(proteomicData,", "{stop-start} sc\") \"\"\" # Unnecssary def typeGuardTaxID(proteomicData, uColl): taxids = {} for uID", "\"\"\" # TEST W/ mycoplasma proteome # The test this #python -m pyqp", "def typeGuardTaxID(proteomicData, uColl): taxids = {} for uID in proteomicData.uniprot: uObj = uColl.get(uID)", "- {5}\\n{rankingsORA[5]}\") print(f\"Execution time is {stop-start} sc\") \"\"\" # Unnecssary def typeGuardTaxID(proteomicData, uColl):", "first {nDelta} as of quantity modified\") print(\"Computing ORA\") deltaUniprotID = expUniprotID[:nDelta] rankingsORA =", "= proteomicWrapper( csv_file = arguments['<proteomicTSV>'], abnd_label = abnd_field) uColl = createUniprotCollection(collectionXML = arguments['<proteomeXML>']", "= time.perf_counter() # Taking 10% w/ highest qtty value rankingsORA = unigoTree.computeORA( [", "--ncore=ncore ddd --sizelim=sizelim eee --prot=<proteomeXML> ggg --adress=<apiAdress> aaa --port=<apiPort> aaa --verbose iiii --topScore=<pathway_number>", "deltaUniprotID = expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose']) print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\")", "universal annotation tree from {url}\") expUniprotID = [ _ for _ in quantProteomic.uniprot", "Object successfully buildt w/ following dimensions:\") print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t", "arguments['api']: app.run(port=1234) \"\"\" unigoTree = createUniGOTree( backgroundUniColl = uColl, proteinList = [ x", "pyT2GA import analysis from 
unigo import Unigo as createUniGOTree from unigo import uloads", "proteome\") missingProt.append(x) for x in missingProt: quantProteomic.remove(x) taxid = uColl.taxids[0] apiAdress = arguments['--adress']", "arguments['--port'] if arguments['--port'] else \"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation tree from", "from .utils import proteomicWrapper from pyproteinsExt.uniprot import EntrySet as createUniprotCollection from requests import", ", verbose = False) stop = time.perf_counter() print(f\"Test Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution time", "missingProt = [] for x in quantProteomic.uniprot: if not uColl.has(x): print(f\"{x} not found", "-h --help Show this screen. --field=<quantity column> csv column header featuring signal --purb=purb", "print(f\"Fetching universal annotation tree from {url}\") expUniprotID = [ _ for _ in", "not be registred\") else: unigoTree = createGOTreeFromAPI(resp.text, expUniprotID) x,y = unigoTree.dimensions print(\"Unigo Object", "Unigo as createUniGOTree from unigo import uloads as createGOTreeFromAPI from .utils import proteomicWrapper", "time.perf_counter() print(f\"Test Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution time is {stop-start} sc\") \"\"\" # Unnecssary", "= expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose']) print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\") if", "children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree => nodes:{y[0]} children_links:{y[1]}, total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic))", "this #python -m pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import docopt #from pyT2GA", "#python -m pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import docopt #from pyT2GA import", "in 
quantProteomic[nTop].uniprot ] , verbose = False) stop = time.perf_counter() print(f\"Test Top -", "not uColl.has(x): print(f\"{x} not found in proteome\") missingProt.append(x) for x in missingProt: quantProteomic.remove(x)", "rankingsORA = unigoTree.computeORA( [ _ for _ in quantProteomic[nTop].uniprot ] , verbose =", "quantProteomic[nTop].uniprot ] , verbose = False) stop = time.perf_counter() print(f\"Test Top - {5}\\n{rankingsORA[5]}\")", "= f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation tree from {url}\") expUniprotID = [ _ for", "--port=<apiPort> aaa --verbose iiii --topScore=<pathway_number> aaaa \"\"\" # TEST W/ mycoplasma proteome #", "ddd --sizelim=sizelim eee --prot=<proteomeXML> ggg --adress=<apiAdress> aaa --port=<apiPort> aaa --verbose iiii --topScore=<pathway_number> aaaa", "quantProteomic.uniprot: if not uColl.has(x): print(f\"{x} not found in proteome\") missingProt.append(x) for x in", "nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available in quantitative records, taking first {nDelta} as", "rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose']) print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: app.run(port=1234)", "pyqp api pyqp cli <proteomicTSV> <proteomeXML> [--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options: -h", "-m pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import docopt #from pyT2GA import analysis", "404: print(f\"{url} returned 404, provided proteome XML {taxid} may not be registred\") else:", "value rankingsORA = unigoTree.computeORA( [ _ for _ in quantProteomic[nTop].uniprot ] , verbose", "if arguments['--field'] else \"Corrected Abundance ratio (1,526968203)\" nTop = int(arguments['--topScore']) if arguments['--topScore'] else", "import app import time arguments = docopt(__doc__) 
#print(arguments) abnd_field = arguments['--field'] if arguments['--field']", "total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available in quantitative records, taking first", "arguments['--port'] else \"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation tree from {url}\") expUniprotID", "if arguments['--port'] else \"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation tree from {url}\")", "= uColl.get(uID) if not uObj.taxid in taxids: taxids[uObj.taxid] = 0 taxids[uObj.taxid] += 1", "for x in missingProt: quantProteomic.remove(x) taxid = uColl.taxids[0] apiAdress = arguments['--adress'] if arguments['--adress']", "unigoTree.computeORA( [ _ for _ in quantProteomic[nTop].uniprot ] , verbose = False) stop", "quantProteomic.uniprot ], fetchLatest = False) start = time.perf_counter() # Taking 10% w/ highest", "in quantProteomic.uniprot: if not uColl.has(x): print(f\"{x} not found in proteome\") missingProt.append(x) for x", "--alpha=alpha ccc --ncore=ncore ddd --sizelim=sizelim eee --prot=<proteomeXML> ggg --adress=<apiAdress> aaa --port=<apiPort> aaa --verbose", "header featuring signal --purb=purb aa --intg=intg bbb --alpha=alpha ccc --ncore=ncore ddd --sizelim=sizelim eee", "= unigoTree.dimensions print(\"Unigo Object successfully buildt w/ following dimensions:\") print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]},", "= False) stop = time.perf_counter() print(f\"Test Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution time is {stop-start}", "[ _ for _ in quantProteomic.uniprot ] resp = get(url) if resp.status_code ==", "docopt import docopt #from pyT2GA import analysis from unigo import Unigo as createUniGOTree", "= arguments['<proteomeXML>'] ) missingProt = [] for x in quantProteomic.uniprot: if not uColl.has(x):", "[] for x in quantProteomic.uniprot: if not uColl.has(x): 
print(f\"{x} not found in proteome\")", "uloads as createGOTreeFromAPI from .utils import proteomicWrapper from pyproteinsExt.uniprot import EntrySet as createUniprotCollection", "createUniGOTree( backgroundUniColl = uColl, proteinList = [ x for x in quantProteomic.uniprot ],", "<proteomeXML> [--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options: -h --help Show this screen. --field=<quantity", "int(arguments['--topScore']) if arguments['--topScore'] else 20 if arguments['cli']: quantProteomic = proteomicWrapper( csv_file = arguments['<proteomicTSV>'],", "of quantity modified\") print(\"Computing ORA\") deltaUniprotID = expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose =", "return sorted( [ (k,v) for k,v in taxids.items() ], key=lambda x:x[1] ) #r", "uColl, proteinList = [ x for x in quantProteomic.uniprot ], fetchLatest = False)", "csv_file = arguments['<proteomicTSV>'], abnd_label = abnd_field) uColl = createUniprotCollection(collectionXML = arguments['<proteomeXML>'] ) missingProt", "in proteome\") missingProt.append(x) for x in missingProt: quantProteomic.remove(x) taxid = uColl.taxids[0] apiAdress =", "successfully buildt w/ following dimensions:\") print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree", "time is {stop-start} sc\") \"\"\" # Unnecssary def typeGuardTaxID(proteomicData, uColl): taxids = {}", "buildt w/ following dimensions:\") print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree =>", "if not uObj.taxid in taxids: taxids[uObj.taxid] = 0 taxids[uObj.taxid] += 1 return sorted(", "False) start = time.perf_counter() # Taking 10% w/ highest qtty value rankingsORA =", "10% w/ highest qtty value rankingsORA = unigoTree.computeORA( [ _ for _ in", "1 return sorted( [ (k,v) for k,v in 
taxids.items() ], key=lambda x:x[1] )", "backgroundUniColl = uColl, proteinList = [ x for x in quantProteomic.uniprot ], fetchLatest", "time.perf_counter() # Taking 10% w/ highest qtty value rankingsORA = unigoTree.computeORA( [ _", "20 if arguments['cli']: quantProteomic = proteomicWrapper( csv_file = arguments['<proteomicTSV>'], abnd_label = abnd_field) uColl", "aa --intg=intg bbb --alpha=alpha ccc --ncore=ncore ddd --sizelim=sizelim eee --prot=<proteomeXML> ggg --adress=<apiAdress> aaa", "unigoTree = createUniGOTree( backgroundUniColl = uColl, proteinList = [ x for x in", "0 taxids[uObj.taxid] += 1 return sorted( [ (k,v) for k,v in taxids.items() ],", "[ (k,v) for k,v in taxids.items() ], key=lambda x:x[1] ) #r = pyt2ga.analysis(proteoRes,", "= arguments['--field'] if arguments['--field'] else \"Corrected Abundance ratio (1,526968203)\" nTop = int(arguments['--topScore']) if", "print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: app.run(port=1234) \"\"\" unigoTree = createUniGOTree( backgroundUniColl =", "[ x for x in quantProteomic.uniprot ], fetchLatest = False) start = time.perf_counter()", "len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available in quantitative records, taking first {nDelta} as of quantity", "unigoTree = createGOTreeFromAPI(resp.text, expUniprotID) x,y = unigoTree.dimensions print(\"Unigo Object successfully buildt w/ following", "if not uColl.has(x): print(f\"{x} not found in proteome\") missingProt.append(x) for x in missingProt:", "ORA\") deltaUniprotID = expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose']) print(f\"Test Top -", "ggg --adress=<apiAdress> aaa --port=<apiPort> aaa --verbose iiii --topScore=<pathway_number> aaaa \"\"\" # TEST W/", "--intg=intg bbb --alpha=alpha ccc --ncore=ncore ddd --sizelim=sizelim eee --prot=<proteomeXML> ggg --adress=<apiAdress> aaa --port=<apiPort>", "if arguments['--adress'] else \"127.0.0.1\" apiPort = 
arguments['--port'] if arguments['--port'] else \"5000\" url =", "api pyqp cli <proteomicTSV> <proteomeXML> [--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options: -h --help", "import time arguments = docopt(__doc__) #print(arguments) abnd_field = arguments['--field'] if arguments['--field'] else \"Corrected", "import analysis from unigo import Unigo as createUniGOTree from unigo import uloads as", "[--verbose] [--topScore=<pathway_number>] Options: -h --help Show this screen. --field=<quantity column> csv column header", "[--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options: -h --help Show this screen. --field=<quantity column> csv", "--purb=purb aa --intg=intg bbb --alpha=alpha ccc --ncore=ncore ddd --sizelim=sizelim eee --prot=<proteomeXML> ggg --adress=<apiAdress>", "{5}\\n{rankingsORA[5]}\") print(f\"Execution time is {stop-start} sc\") \"\"\" # Unnecssary def typeGuardTaxID(proteomicData, uColl): taxids", "print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree => nodes:{y[0]} children_links:{y[1]}, total_protein_occurences:{y[2]}, protein_set:{y[3]}\")", "else: unigoTree = createGOTreeFromAPI(resp.text, expUniprotID) x,y = unigoTree.dimensions print(\"Unigo Object successfully buildt w/", "time arguments = docopt(__doc__) #print(arguments) abnd_field = arguments['--field'] if arguments['--field'] else \"Corrected Abundance", "x in missingProt: quantProteomic.remove(x) taxid = uColl.taxids[0] apiAdress = arguments['--adress'] if arguments['--adress'] else", "uID in proteomicData.uniprot: uObj = uColl.get(uID) if not uObj.taxid in taxids: taxids[uObj.taxid] =", "\"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation tree from {url}\") expUniprotID = [", "app.run(port=1234) \"\"\" unigoTree = createUniGOTree( backgroundUniColl = 
uColl, proteinList = [ x for", "proteomicWrapper( csv_file = arguments['<proteomicTSV>'], abnd_label = abnd_field) uColl = createUniprotCollection(collectionXML = arguments['<proteomeXML>'] )", "[--topScore=<pathway_number>] Options: -h --help Show this screen. --field=<quantity column> csv column header featuring", "arguments['<proteomeXML>'] ) missingProt = [] for x in quantProteomic.uniprot: if not uColl.has(x): print(f\"{x}", "missingProt: quantProteomic.remove(x) taxid = uColl.taxids[0] apiAdress = arguments['--adress'] if arguments['--adress'] else \"127.0.0.1\" apiPort", "in quantProteomic.uniprot ], fetchLatest = False) start = time.perf_counter() # Taking 10% w/", "import uloads as createGOTreeFromAPI from .utils import proteomicWrapper from pyproteinsExt.uniprot import EntrySet as", "typeGuardTaxID(proteomicData, uColl): taxids = {} for uID in proteomicData.uniprot: uObj = uColl.get(uID) if", "children_links:{y[1]}, total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available in quantitative records, taking", "unigo import Unigo as createUniGOTree from unigo import uloads as createGOTreeFromAPI from .utils", "uObj.taxid in taxids: taxids[uObj.taxid] = 0 taxids[uObj.taxid] += 1 return sorted( [ (k,v)", "docopt(__doc__) #print(arguments) abnd_field = arguments['--field'] if arguments['--field'] else \"Corrected Abundance ratio (1,526968203)\" nTop", "* len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available in quantitative records, taking first {nDelta} as of", "print(\"Computing ORA\") deltaUniprotID = expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose']) print(f\"Test Top", "app import time arguments = docopt(__doc__) #print(arguments) abnd_field = arguments['--field'] if arguments['--field'] else", "= arguments['--verbose']) print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: 
app.run(port=1234) \"\"\" unigoTree = createUniGOTree(", "arguments['<proteomicTSV>'], abnd_label = abnd_field) uColl = createUniprotCollection(collectionXML = arguments['<proteomeXML>'] ) missingProt = []", "= [] for x in quantProteomic.uniprot: if not uColl.has(x): print(f\"{x} not found in", "from {url}\") expUniprotID = [ _ for _ in quantProteomic.uniprot ] resp =", "nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree => nodes:{y[0]} children_links:{y[1]}, total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 *", "404, provided proteome XML {taxid} may not be registred\") else: unigoTree = createGOTreeFromAPI(resp.text,", "this screen. --field=<quantity column> csv column header featuring signal --purb=purb aa --intg=intg bbb", "abnd_field) uColl = createUniprotCollection(collectionXML = arguments['<proteomeXML>'] ) missingProt = [] for x in", "_ for _ in quantProteomic.uniprot ] resp = get(url) if resp.status_code == 404:", "# The test this #python -m pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import", "not uObj.taxid in taxids: taxids[uObj.taxid] = 0 taxids[uObj.taxid] += 1 return sorted( [", "screen. 
--field=<quantity column> csv column header featuring signal --purb=purb aa --intg=intg bbb --alpha=alpha", "arguments['--field'] if arguments['--field'] else \"Corrected Abundance ratio (1,526968203)\" nTop = int(arguments['--topScore']) if arguments['--topScore']", "TEST W/ mycoplasma proteome # The test this #python -m pyqp cli previous/wt2_subset.tsv", "annotation tree from {url}\") expUniprotID = [ _ for _ in quantProteomic.uniprot ]", "{} for uID in proteomicData.uniprot: uObj = uColl.get(uID) if not uObj.taxid in taxids:", "createGOTreeFromAPI from .utils import proteomicWrapper from pyproteinsExt.uniprot import EntrySet as createUniprotCollection from requests", "uObj = uColl.get(uID) if not uObj.taxid in taxids: taxids[uObj.taxid] = 0 taxids[uObj.taxid] +=", "= False) start = time.perf_counter() # Taking 10% w/ highest qtty value rankingsORA", "nTop = int(arguments['--topScore']) if arguments['--topScore'] else 20 if arguments['cli']: quantProteomic = proteomicWrapper( csv_file", "cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import docopt #from pyT2GA import analysis from unigo", "k,v in taxids.items() ], key=lambda x:x[1] ) #r = pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes,", "quantProteomic.uniprot ] resp = get(url) if resp.status_code == 404: print(f\"{url} returned 404, provided", "\"127.0.0.1\" apiPort = arguments['--port'] if arguments['--port'] else \"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal", "], fetchLatest = False) start = time.perf_counter() # Taking 10% w/ highest qtty", "else \"127.0.0.1\" apiPort = arguments['--port'] if arguments['--port'] else \"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching", "Usage: pyqp api pyqp cli <proteomicTSV> <proteomeXML> [--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options:", "get(url) if resp.status_code 
== 404: print(f\"{url} returned 404, provided proteome XML {taxid} may", "#from pyT2GA import analysis from unigo import Unigo as createUniGOTree from unigo import", "arguments['cli']: quantProteomic = proteomicWrapper( csv_file = arguments['<proteomicTSV>'], abnd_label = abnd_field) uColl = createUniprotCollection(collectionXML", "column header featuring signal --purb=purb aa --intg=intg bbb --alpha=alpha ccc --ncore=ncore ddd --sizelim=sizelim", "= int(arguments['--topScore']) if arguments['--topScore'] else 20 if arguments['cli']: quantProteomic = proteomicWrapper( csv_file =", "taxids.items() ], key=lambda x:x[1] ) #r = pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes, intg=False, #", "import EntrySet as createUniprotCollection from requests import get from .api import app import", "= [ _ for _ in quantProteomic.uniprot ] resp = get(url) if resp.status_code", "import proteomicWrapper from pyproteinsExt.uniprot import EntrySet as createUniprotCollection from requests import get from", "missingProt.append(x) for x in missingProt: quantProteomic.remove(x) taxid = uColl.taxids[0] apiAdress = arguments['--adress'] if", "not found in proteome\") missingProt.append(x) for x in missingProt: quantProteomic.remove(x) taxid = uColl.taxids[0]", "= 0 taxids[uObj.taxid] += 1 return sorted( [ (k,v) for k,v in taxids.items()", "{url}\") expUniprotID = [ _ for _ in quantProteomic.uniprot ] resp = get(url)", "EntrySet as createUniprotCollection from requests import get from .api import app import time", "aaa --verbose iiii --topScore=<pathway_number> aaaa \"\"\" # TEST W/ mycoplasma proteome # The", "returned 404, provided proteome XML {taxid} may not be registred\") else: unigoTree =", "found in proteome\") missingProt.append(x) for x in missingProt: quantProteomic.remove(x) taxid = uColl.taxids[0] apiAdress", "expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose']) print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\") 
if arguments['api']:", "quantProteomic.remove(x) taxid = uColl.taxids[0] apiAdress = arguments['--adress'] if arguments['--adress'] else \"127.0.0.1\" apiPort =", "else \"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation tree from {url}\") expUniprotID =", "analysis from unigo import Unigo as createUniGOTree from unigo import uloads as createGOTreeFromAPI", "x in quantProteomic.uniprot ], fetchLatest = False) start = time.perf_counter() # Taking 10%", "arguments['--field'] else \"Corrected Abundance ratio (1,526968203)\" nTop = int(arguments['--topScore']) if arguments['--topScore'] else 20", "in quantitative records, taking first {nDelta} as of quantity modified\") print(\"Computing ORA\") deltaUniprotID", "column> csv column header featuring signal --purb=purb aa --intg=intg bbb --alpha=alpha ccc --ncore=ncore", "if arguments['--topScore'] else 20 if arguments['cli']: quantProteomic = proteomicWrapper( csv_file = arguments['<proteomicTSV>'], abnd_label", "--adress=<apiAdress> aaa --port=<apiPort> aaa --verbose iiii --topScore=<pathway_number> aaaa \"\"\" # TEST W/ mycoplasma", "<proteomicTSV> <proteomeXML> [--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options: -h --help Show this screen.", "verbose = arguments['--verbose']) print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: app.run(port=1234) \"\"\" unigoTree =", "pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import docopt #from pyT2GA import analysis from", "Proteomic Service Usage: pyqp api pyqp cli <proteomicTSV> <proteomeXML> [--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>] [--verbose]", "print(f\"\\t universeTree => nodes:{y[0]} children_links:{y[1]}, total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available", 
"print(f\"Test Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution time is {stop-start} sc\") \"\"\" # Unnecssary def", "= {} for uID in proteomicData.uniprot: uObj = uColl.get(uID) if not uObj.taxid in", "apiAdress = arguments['--adress'] if arguments['--adress'] else \"127.0.0.1\" apiPort = arguments['--port'] if arguments['--port'] else", "arguments['--adress'] if arguments['--adress'] else \"127.0.0.1\" apiPort = arguments['--port'] if arguments['--port'] else \"5000\" url", "highest qtty value rankingsORA = unigoTree.computeORA( [ _ for _ in quantProteomic[nTop].uniprot ]", "pyproteinsExt.uniprot import EntrySet as createUniprotCollection from requests import get from .api import app", "print(f\"Execution time is {stop-start} sc\") \"\"\" # Unnecssary def typeGuardTaxID(proteomicData, uColl): taxids =", "url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation tree from {url}\") expUniprotID = [ _", "eee --prot=<proteomeXML> ggg --adress=<apiAdress> aaa --port=<apiPort> aaa --verbose iiii --topScore=<pathway_number> aaaa \"\"\" #", "in missingProt: quantProteomic.remove(x) taxid = uColl.taxids[0] apiAdress = arguments['--adress'] if arguments['--adress'] else \"127.0.0.1\"", "unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import docopt #from pyT2GA import analysis from unigo import Unigo", "provided proteome XML {taxid} may not be registred\") else: unigoTree = createGOTreeFromAPI(resp.text, expUniprotID)", "quantProteomic = proteomicWrapper( csv_file = arguments['<proteomicTSV>'], abnd_label = abnd_field) uColl = createUniprotCollection(collectionXML =", "records, taking first {nDelta} as of quantity modified\") print(\"Computing ORA\") deltaUniprotID = expUniprotID[:nDelta]", "False) stop = time.perf_counter() print(f\"Test Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution time is {stop-start} sc\")", "protein_set:{x[3]}\") print(f\"\\t universeTree => nodes:{y[0]} children_links:{y[1]}, 
total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)} proteins", "{taxid} may not be registred\") else: unigoTree = createGOTreeFromAPI(resp.text, expUniprotID) x,y = unigoTree.dimensions", "import get from .api import app import time arguments = docopt(__doc__) #print(arguments) abnd_field", "] , verbose = False) stop = time.perf_counter() print(f\"Test Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution", "_ in quantProteomic.uniprot ] resp = get(url) if resp.status_code == 404: print(f\"{url} returned", "= unigoTree.computeORA( [ _ for _ in quantProteomic[nTop].uniprot ] , verbose = False)", "(k,v) for k,v in taxids.items() ], key=lambda x:x[1] ) #r = pyt2ga.analysis(proteoRes, GOpwRes,", "requests import get from .api import app import time arguments = docopt(__doc__) #print(arguments)", "=> nodes:{y[0]} children_links:{y[1]}, total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available in quantitative", "uColl.get(uID) if not uObj.taxid in taxids: taxids[uObj.taxid] = 0 taxids[uObj.taxid] += 1 return", "previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt import docopt #from pyT2GA import analysis from unigo import", "Unnecssary def typeGuardTaxID(proteomicData, uColl): taxids = {} for uID in proteomicData.uniprot: uObj =", "\"\"\"Quantitative Proteomic Service Usage: pyqp api pyqp cli <proteomicTSV> <proteomeXML> [--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>]", "print(\"Unigo Object successfully buildt w/ following dimensions:\") print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\")", "[--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options: -h --help Show this screen. 
--field=<quantity column>", "unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose']) print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: app.run(port=1234) \"\"\" unigoTree", "# Taking 10% w/ highest qtty value rankingsORA = unigoTree.computeORA( [ _ for", "signal --purb=purb aa --intg=intg bbb --alpha=alpha ccc --ncore=ncore ddd --sizelim=sizelim eee --prot=<proteomeXML> ggg", "unigoTree.dimensions print(\"Unigo Object successfully buildt w/ following dimensions:\") print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]},", "= [ x for x in quantProteomic.uniprot ], fetchLatest = False) start =", "available in quantitative records, taking first {nDelta} as of quantity modified\") print(\"Computing ORA\")", "import Unigo as createUniGOTree from unigo import uloads as createGOTreeFromAPI from .utils import", "proteome # The test this #python -m pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from docopt", "apiPort = arguments['--port'] if arguments['--port'] else \"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation", ".utils import proteomicWrapper from pyproteinsExt.uniprot import EntrySet as createUniprotCollection from requests import get", ".api import app import time arguments = docopt(__doc__) #print(arguments) abnd_field = arguments['--field'] if", "for k,v in taxids.items() ], key=lambda x:x[1] ) #r = pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes,", "= arguments['--port'] if arguments['--port'] else \"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\" print(f\"Fetching universal annotation tree", "+= 1 return sorted( [ (k,v) for k,v in taxids.items() ], key=lambda x:x[1]", "] resp = get(url) if resp.status_code == 404: print(f\"{url} returned 404, provided proteome", "if resp.status_code == 404: print(f\"{url} returned 404, provided proteome XML {taxid} may not", "Show this screen. 
--field=<quantity column> csv column header featuring signal --purb=purb aa --intg=intg", "for x in quantProteomic.uniprot ], fetchLatest = False) start = time.perf_counter() # Taking", "= docopt(__doc__) #print(arguments) abnd_field = arguments['--field'] if arguments['--field'] else \"Corrected Abundance ratio (1,526968203)\"", "x,y = unigoTree.dimensions print(\"Unigo Object successfully buildt w/ following dimensions:\") print(f\"\\txpTree => nodes:{x[0]}", "_ in quantProteomic[nTop].uniprot ] , verbose = False) stop = time.perf_counter() print(f\"Test Top", "proteins available in quantitative records, taking first {nDelta} as of quantity modified\") print(\"Computing", "= unigoTree.computeORA(deltaUniprotID, verbose = arguments['--verbose']) print(f\"Test Top - {nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: app.run(port=1234) \"\"\"", "#r = pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes, intg=False, # abnd_label = \"Corrected Abundance ratio", "mycoplasma proteome # The test this #python -m pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz from", "createUniprotCollection from requests import get from .api import app import time arguments =", "[--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options: -h --help Show this screen. 
--field=<quantity column> csv column", "for _ in quantProteomic.uniprot ] resp = get(url) if resp.status_code == 404: print(f\"{url}", "as createUniGOTree from unigo import uloads as createGOTreeFromAPI from .utils import proteomicWrapper from", "from unigo import Unigo as createUniGOTree from unigo import uloads as createGOTreeFromAPI from", "arguments = docopt(__doc__) #print(arguments) abnd_field = arguments['--field'] if arguments['--field'] else \"Corrected Abundance ratio", "\"Corrected Abundance ratio (1,526968203)\" nTop = int(arguments['--topScore']) if arguments['--topScore'] else 20 if arguments['cli']:", "proteome XML {taxid} may not be registred\") else: unigoTree = createGOTreeFromAPI(resp.text, expUniprotID) x,y", "w/ following dimensions:\") print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree => nodes:{y[0]}", "taking first {nDelta} as of quantity modified\") print(\"Computing ORA\") deltaUniprotID = expUniprotID[:nDelta] rankingsORA", "start = time.perf_counter() # Taking 10% w/ highest qtty value rankingsORA = unigoTree.computeORA(", "{nDelta} as of quantity modified\") print(\"Computing ORA\") deltaUniprotID = expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID,", "x for x in quantProteomic.uniprot ], fetchLatest = False) start = time.perf_counter() #", "x:x[1] ) #r = pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes, intg=False, # abnd_label = \"Corrected", "--sizelim=sizelim eee --prot=<proteomeXML> ggg --adress=<apiAdress> aaa --port=<apiPort> aaa --verbose iiii --topScore=<pathway_number> aaaa \"\"\"", "proteomicWrapper from pyproteinsExt.uniprot import EntrySet as createUniprotCollection from requests import get from .api", "= createUniGOTree( backgroundUniColl = uColl, proteinList = [ x for x in quantProteomic.uniprot", "Service Usage: pyqp api pyqp cli <proteomicTSV> <proteomeXML> [--field=<quantity_column>] 
[--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>]", "print(f\"{len(quantProteomic)} proteins available in quantitative records, taking first {nDelta} as of quantity modified\")", "ccc --ncore=ncore ddd --sizelim=sizelim eee --prot=<proteomeXML> ggg --adress=<apiAdress> aaa --port=<apiPort> aaa --verbose iiii", "from .api import app import time arguments = docopt(__doc__) #print(arguments) abnd_field = arguments['--field']", "for uID in proteomicData.uniprot: uObj = uColl.get(uID) if not uObj.taxid in taxids: taxids[uObj.taxid]", ") #r = pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes, intg=False, # abnd_label = \"Corrected Abundance", "== 404: print(f\"{url} returned 404, provided proteome XML {taxid} may not be registred\")", "csv column header featuring signal --purb=purb aa --intg=intg bbb --alpha=alpha ccc --ncore=ncore ddd", "proteomicData.uniprot: uObj = uColl.get(uID) if not uObj.taxid in taxids: taxids[uObj.taxid] = 0 taxids[uObj.taxid]", "= createUniprotCollection(collectionXML = arguments['<proteomeXML>'] ) missingProt = [] for x in quantProteomic.uniprot: if", "= uColl, proteinList = [ x for x in quantProteomic.uniprot ], fetchLatest =", "# Unnecssary def typeGuardTaxID(proteomicData, uColl): taxids = {} for uID in proteomicData.uniprot: uObj", "print(f\"{x} not found in proteome\") missingProt.append(x) for x in missingProt: quantProteomic.remove(x) taxid =", "tree from {url}\") expUniprotID = [ _ for _ in quantProteomic.uniprot ] resp", "arguments['--topScore'] else 20 if arguments['cli']: quantProteomic = proteomicWrapper( csv_file = arguments['<proteomicTSV>'], abnd_label =", "taxids = {} for uID in proteomicData.uniprot: uObj = uColl.get(uID) if not uObj.taxid", "taxids[uObj.taxid] += 1 return sorted( [ (k,v) for k,v in taxids.items() ], key=lambda", "unigo import uloads as createGOTreeFromAPI from .utils import proteomicWrapper from pyproteinsExt.uniprot import EntrySet", "for _ in 
quantProteomic[nTop].uniprot ] , verbose = False) stop = time.perf_counter() print(f\"Test", "= pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes, intg=False, # abnd_label = \"Corrected Abundance ratio (1,526968203)\",", "--prot=<proteomeXML> ggg --adress=<apiAdress> aaa --port=<apiPort> aaa --verbose iiii --topScore=<pathway_number> aaaa \"\"\" # TEST", "x in quantProteomic.uniprot: if not uColl.has(x): print(f\"{x} not found in proteome\") missingProt.append(x) for", "createUniprotCollection(collectionXML = arguments['<proteomeXML>'] ) missingProt = [] for x in quantProteomic.uniprot: if not", "Options: -h --help Show this screen. --field=<quantity column> csv column header featuring signal", "from unigo import uloads as createGOTreeFromAPI from .utils import proteomicWrapper from pyproteinsExt.uniprot import", "resp = get(url) if resp.status_code == 404: print(f\"{url} returned 404, provided proteome XML", "pyqp cli <proteomicTSV> <proteomeXML> [--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options: -h --help Show", "= uColl.taxids[0] apiAdress = arguments['--adress'] if arguments['--adress'] else \"127.0.0.1\" apiPort = arguments['--port'] if", "as createUniprotCollection from requests import get from .api import app import time arguments", "--topScore=<pathway_number> aaaa \"\"\" # TEST W/ mycoplasma proteome # The test this #python", "get from .api import app import time arguments = docopt(__doc__) #print(arguments) abnd_field =", "registred\") else: unigoTree = createGOTreeFromAPI(resp.text, expUniprotID) x,y = unigoTree.dimensions print(\"Unigo Object successfully buildt", "], key=lambda x:x[1] ) #r = pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes, intg=False, # abnd_label", "cli <proteomicTSV> <proteomeXML> [--field=<quantity_column>] [--adress=<apiAdress>] [--port=<apiPort>] [--verbose] [--topScore=<pathway_number>] Options: -h --help Show this", "W/ mycoplasma proteome # 
The test this #python -m pyqp cli previous/wt2_subset.tsv unigo/src/unigo/data/uniprot-proteome_UP000000625.xml.gz", "= createGOTreeFromAPI(resp.text, expUniprotID) x,y = unigoTree.dimensions print(\"Unigo Object successfully buildt w/ following dimensions:\")", "following dimensions:\") print(f\"\\txpTree => nodes:{x[0]} children_links:{x[1]}, total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree => nodes:{y[0]} children_links:{y[1]},", "{nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: app.run(port=1234) \"\"\" unigoTree = createUniGOTree( backgroundUniColl = uColl, proteinList =", "aaaa \"\"\" # TEST W/ mycoplasma proteome # The test this #python -m", "pyt2ga.analysis(proteoRes, GOpwRes, STRINGRes, mapperRes, intg=False, # abnd_label = \"Corrected Abundance ratio (1,526968203)\", ncore=3)", "in proteomicData.uniprot: uObj = uColl.get(uID) if not uObj.taxid in taxids: taxids[uObj.taxid] = 0", "uColl = createUniprotCollection(collectionXML = arguments['<proteomeXML>'] ) missingProt = [] for x in quantProteomic.uniprot:", "\"\"\" unigoTree = createUniGOTree( backgroundUniColl = uColl, proteinList = [ x for x", "Taking 10% w/ highest qtty value rankingsORA = unigoTree.computeORA( [ _ for _", "nodes:{y[0]} children_links:{y[1]}, total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available in quantitative records,", "= abnd_field) uColl = createUniprotCollection(collectionXML = arguments['<proteomeXML>'] ) missingProt = [] for x", "iiii --topScore=<pathway_number> aaaa \"\"\" # TEST W/ mycoplasma proteome # The test this", "= arguments['<proteomicTSV>'], abnd_label = abnd_field) uColl = createUniprotCollection(collectionXML = arguments['<proteomeXML>'] ) missingProt =", "= arguments['--adress'] if arguments['--adress'] else \"127.0.0.1\" apiPort = arguments['--port'] if arguments['--port'] else \"5000\"", "universeTree => nodes:{y[0]} children_links:{y[1]}, 
total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)} proteins available in", "aaa --port=<apiPort> aaa --verbose iiii --topScore=<pathway_number> aaaa \"\"\" # TEST W/ mycoplasma proteome", "as of quantity modified\") print(\"Computing ORA\") deltaUniprotID = expUniprotID[:nDelta] rankingsORA = unigoTree.computeORA(deltaUniprotID, verbose", "from requests import get from .api import app import time arguments = docopt(__doc__)", "Top - {nTop}\\n{rankingsORA[:nTop]}\") if arguments['api']: app.run(port=1234) \"\"\" unigoTree = createUniGOTree( backgroundUniColl = uColl,", "from pyproteinsExt.uniprot import EntrySet as createUniprotCollection from requests import get from .api import", "featuring signal --purb=purb aa --intg=intg bbb --alpha=alpha ccc --ncore=ncore ddd --sizelim=sizelim eee --prot=<proteomeXML>", "(1,526968203)\" nTop = int(arguments['--topScore']) if arguments['--topScore'] else 20 if arguments['cli']: quantProteomic = proteomicWrapper(", "uColl.has(x): print(f\"{x} not found in proteome\") missingProt.append(x) for x in missingProt: quantProteomic.remove(x) taxid", "arguments['--adress'] else \"127.0.0.1\" apiPort = arguments['--port'] if arguments['--port'] else \"5000\" url = f\"http://{apiAdress}:{apiPort}/unigo/{taxid}\"", "w/ highest qtty value rankingsORA = unigoTree.computeORA( [ _ for _ in quantProteomic[nTop].uniprot", ") missingProt = [] for x in quantProteomic.uniprot: if not uColl.has(x): print(f\"{x} not", "total_protein_occurences:{x[2]}, protein_set:{x[3]}\") print(f\"\\t universeTree => nodes:{y[0]} children_links:{y[1]}, total_protein_occurences:{y[2]}, protein_set:{y[3]}\") nDelta=int(0.1 * len(quantProteomic)) print(f\"{len(quantProteomic)}", "for x in quantProteomic.uniprot: if not uColl.has(x): print(f\"{x} not found in proteome\") missingProt.append(x)", "is {stop-start} sc\") \"\"\" # Unnecssary def typeGuardTaxID(proteomicData, uColl): taxids = {} for", 
"stop = time.perf_counter() print(f\"Test Top - {5}\\n{rankingsORA[5]}\") print(f\"Execution time is {stop-start} sc\") \"\"\"", "expUniprotID = [ _ for _ in quantProteomic.uniprot ] resp = get(url) if", "expUniprotID) x,y = unigoTree.dimensions print(\"Unigo Object successfully buildt w/ following dimensions:\") print(f\"\\txpTree =>", "else 20 if arguments['cli']: quantProteomic = proteomicWrapper( csv_file = arguments['<proteomicTSV>'], abnd_label = abnd_field)", "--help Show this screen. --field=<quantity column> csv column header featuring signal --purb=purb aa", "Abundance ratio (1,526968203)\" nTop = int(arguments['--topScore']) if arguments['--topScore'] else 20 if arguments['cli']: quantProteomic", "[ _ for _ in quantProteomic[nTop].uniprot ] , verbose = False) stop =", "#print(arguments) abnd_field = arguments['--field'] if arguments['--field'] else \"Corrected Abundance ratio (1,526968203)\" nTop =" ]
[ "range(len(cust_list)): self.customer_list.append(row) class Transaction(Base): __tablename__ = 'person' # Here we define columns for", "self.CustDb() for message in self.consumer: message = message.value print('{} received'.format(message)) self.customer[message['custid']] = message", "createdate = Column(Integer) fname = Column(String(50)) lname = Column(String(50)) if __name__ == \"__main__\":", "str_sql = 'mysql+mysqlconnector://' + user + ':' + pw + '@localhost/ZipBank' engine =", "a normal Python instance attribute. custid = Column(Integer, primary_key=True) createdate = Column(Integer) fname", "loads from sqlalchemy import create_engine, Table, Column, Integer, String from sqlalchemy.ext.declarative import declarative_base", "loads(m.decode('ascii'))) ## These are two python dictionaries # Ledger is the one where", "Transaction(Base): __tablename__ = 'person' # Here we define columns for the table person", "two python dictionaries # Ledger is the one where all the transaction get", "Python instance attribute. custid = Column(Integer, primary_key=True) createdate = Column(Integer) fname = Column(String(50))", "SQL usinf SQLalchemy if message['custid'] in self.customer_list: print(\"Already a customer\") else: with engine.connect()", "message['createdate'], message['fname'], message['lname'])) print(self.customer) def CustDb(self): with engine.connect() as connection: cust = connection.execute(\"select", "m: loads(m.decode('ascii'))) ## These are two python dictionaries # Ledger is the one", "= {} self.customer_list = [] #Go back to the readme. 
def handleMessages(self): self.CustDb()", "message to the transaction table in your SQL usinf SQLalchemy if message['custid'] in", "# Here we define columns for the table person # Notice that each", "Column(String(50)) lname = Column(String(50)) if __name__ == \"__main__\": Base.metadata.create_all(engine) c = XactionConsumer() c.handleMessages()", "customer\") else: with engine.connect() as connection: connection.execute(\"insert into person (custid, createdate, fname, lname)", "primary_key=True) createdate = Column(Integer) fname = Column(String(50)) lname = Column(String(50)) if __name__ ==", "attribute. custid = Column(Integer, primary_key=True) createdate = Column(Integer) fname = Column(String(50)) lname =", "self.customer[message['custid']] = message # add message to the transaction table in your SQL", "is also a normal Python instance attribute. custid = Column(Integer, primary_key=True) createdate =", "= 'person' # Here we define columns for the table person # Notice", "define columns for the table person # Notice that each column is also", "String from sqlalchemy.ext.declarative import declarative_base import os user = os.getenv('MYSQL_user') pw = os.getenv('MYSQL')", "class XactionConsumer: def __init__(self): self.consumer = KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii')))", "user + ':' + pw + '@localhost/ZipBank' engine = create_engine(str_sql) Base = declarative_base(bind=engine)", "connection: connection.execute(\"insert into person (custid, createdate, fname, lname) values(%s, %s, %s, %s)\", (message['custid'],", "normal Python instance attribute. 
custid = Column(Integer, primary_key=True) createdate = Column(Integer) fname =", "Ledger is the one where all the transaction get posted self.customer = {}", "= message # add message to the transaction table in your SQL usinf", "received'.format(message)) self.customer[message['custid']] = message # add message to the transaction table in your", "we define columns for the table person # Notice that each column is", "Column, Integer, String from sqlalchemy.ext.declarative import declarative_base import os user = os.getenv('MYSQL_user') pw", "import declarative_base import os user = os.getenv('MYSQL_user') pw = os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://'", "CustDb(self): with engine.connect() as connection: cust = connection.execute(\"select custid from person\") cust_list =", "+ pw + '@localhost/ZipBank' engine = create_engine(str_sql) Base = declarative_base(bind=engine) class XactionConsumer: def", "person (custid, createdate, fname, lname) values(%s, %s, %s, %s)\", (message['custid'], message['createdate'], message['fname'], message['lname']))", "':' + pw + '@localhost/ZipBank' engine = create_engine(str_sql) Base = declarative_base(bind=engine) class XactionConsumer:", "in your SQL usinf SQLalchemy if message['custid'] in self.customer_list: print(\"Already a customer\") else:", "create_engine(str_sql) Base = declarative_base(bind=engine) class XactionConsumer: def __init__(self): self.consumer = KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], #", "table person # Notice that each column is also a normal Python instance", "from sqlalchemy import create_engine, Table, Column, Integer, String from sqlalchemy.ext.declarative import declarative_base import", "fname = Column(String(50)) lname = Column(String(50)) if __name__ == \"__main__\": Base.metadata.create_all(engine) c =", "message['custid'] in self.customer_list: print(\"Already a customer\") else: with engine.connect() as connection: connection.execute(\"insert into", "= 
create_engine(str_sql) Base = declarative_base(bind=engine) class XactionConsumer: def __init__(self): self.consumer = KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'],", "from json import loads from sqlalchemy import create_engine, Table, Column, Integer, String from", "= KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii'))) ## These are two python", "sqlalchemy import create_engine, Table, Column, Integer, String from sqlalchemy.ext.declarative import declarative_base import os", "custid from person\") cust_list = cust.fetchall() for row in range(len(cust_list)): self.customer_list.append(row) class Transaction(Base):", "import KafkaConsumer, TopicPartition from json import loads from sqlalchemy import create_engine, Table, Column,", "in range(len(cust_list)): self.customer_list.append(row) class Transaction(Base): __tablename__ = 'person' # Here we define columns", "handleMessages(self): self.CustDb() for message in self.consumer: message = message.value print('{} received'.format(message)) self.customer[message['custid']] =", "a customer\") else: with engine.connect() as connection: connection.execute(\"insert into person (custid, createdate, fname,", "python dictionaries # Ledger is the one where all the transaction get posted", "= os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://' + user + ':' + pw + '@localhost/ZipBank'", "value_deserializer=lambda m: loads(m.decode('ascii'))) ## These are two python dictionaries # Ledger is the", "all the transaction get posted self.customer = {} self.customer_list = [] #Go back", "%s, %s, %s)\", (message['custid'], message['createdate'], message['fname'], message['lname'])) print(self.customer) def CustDb(self): with engine.connect() as", "column is also a normal Python instance attribute. 
custid = Column(Integer, primary_key=True) createdate", "declarative_base(bind=engine) class XactionConsumer: def __init__(self): self.consumer = KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda m:", "the readme. def handleMessages(self): self.CustDb() for message in self.consumer: message = message.value print('{}", "engine.connect() as connection: connection.execute(\"insert into person (custid, createdate, fname, lname) values(%s, %s, %s,", "def __init__(self): self.consumer = KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii'))) ## These", "__tablename__ = 'person' # Here we define columns for the table person #", "are two python dictionaries # Ledger is the one where all the transaction", "usinf SQLalchemy if message['custid'] in self.customer_list: print(\"Already a customer\") else: with engine.connect() as", "to the readme. def handleMessages(self): self.CustDb() for message in self.consumer: message = message.value", "as connection: connection.execute(\"insert into person (custid, createdate, fname, lname) values(%s, %s, %s, %s)\",", "(message['custid'], message['createdate'], message['fname'], message['lname'])) print(self.customer) def CustDb(self): with engine.connect() as connection: cust =", "is the one where all the transaction get posted self.customer = {} self.customer_list", "self.customer = {} self.customer_list = [] #Go back to the readme. def handleMessages(self):", "create_engine, Table, Column, Integer, String from sqlalchemy.ext.declarative import declarative_base import os user =", "SQLalchemy if message['custid'] in self.customer_list: print(\"Already a customer\") else: with engine.connect() as connection:", "one where all the transaction get posted self.customer = {} self.customer_list = []", "also a normal Python instance attribute. 
custid = Column(Integer, primary_key=True) createdate = Column(Integer)", "Column(Integer, primary_key=True) createdate = Column(Integer) fname = Column(String(50)) lname = Column(String(50)) if __name__", "the table person # Notice that each column is also a normal Python", "import os user = os.getenv('MYSQL_user') pw = os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://' + user", "connection.execute(\"insert into person (custid, createdate, fname, lname) values(%s, %s, %s, %s)\", (message['custid'], message['createdate'],", "if message['custid'] in self.customer_list: print(\"Already a customer\") else: with engine.connect() as connection: connection.execute(\"insert", "back to the readme. def handleMessages(self): self.CustDb() for message in self.consumer: message =", "= [] #Go back to the readme. def handleMessages(self): self.CustDb() for message in", "from kafka import KafkaConsumer, TopicPartition from json import loads from sqlalchemy import create_engine,", "Base = declarative_base(bind=engine) class XactionConsumer: def __init__(self): self.consumer = KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest',", "for the table person # Notice that each column is also a normal", "(custid, createdate, fname, lname) values(%s, %s, %s, %s)\", (message['custid'], message['createdate'], message['fname'], message['lname'])) print(self.customer)", "columns for the table person # Notice that each column is also a", "os user = os.getenv('MYSQL_user') pw = os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://' + user +", "lname) values(%s, %s, %s, %s)\", (message['custid'], message['createdate'], message['fname'], message['lname'])) print(self.customer) def CustDb(self): with", "Column(Integer) fname = Column(String(50)) lname = Column(String(50)) if __name__ == \"__main__\": Base.metadata.create_all(engine) c", "Integer, String from sqlalchemy.ext.declarative import declarative_base import os user = 
os.getenv('MYSQL_user') pw =", "your SQL usinf SQLalchemy if message['custid'] in self.customer_list: print(\"Already a customer\") else: with", "json import loads from sqlalchemy import create_engine, Table, Column, Integer, String from sqlalchemy.ext.declarative", "message['fname'], message['lname'])) print(self.customer) def CustDb(self): with engine.connect() as connection: cust = connection.execute(\"select custid", "self.consumer = KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii'))) ## These are two", "print(\"Already a customer\") else: with engine.connect() as connection: connection.execute(\"insert into person (custid, createdate,", "__init__(self): self.consumer = KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii'))) ## These are", "in self.consumer: message = message.value print('{} received'.format(message)) self.customer[message['custid']] = message # add message", "import loads from sqlalchemy import create_engine, Table, Column, Integer, String from sqlalchemy.ext.declarative import", "{} self.customer_list = [] #Go back to the readme. 
def handleMessages(self): self.CustDb() for", "fname, lname) values(%s, %s, %s, %s)\", (message['custid'], message['createdate'], message['fname'], message['lname'])) print(self.customer) def CustDb(self):", "class Transaction(Base): __tablename__ = 'person' # Here we define columns for the table", "+ '@localhost/ZipBank' engine = create_engine(str_sql) Base = declarative_base(bind=engine) class XactionConsumer: def __init__(self): self.consumer", "engine = create_engine(str_sql) Base = declarative_base(bind=engine) class XactionConsumer: def __init__(self): self.consumer = KafkaConsumer('bank-customer-new',", "TopicPartition from json import loads from sqlalchemy import create_engine, Table, Column, Integer, String", "Here we define columns for the table person # Notice that each column", "These are two python dictionaries # Ledger is the one where all the", "transaction get posted self.customer = {} self.customer_list = [] #Go back to the", "the transaction table in your SQL usinf SQLalchemy if message['custid'] in self.customer_list: print(\"Already", "pw = os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://' + user + ':' + pw +", "'@localhost/ZipBank' engine = create_engine(str_sql) Base = declarative_base(bind=engine) class XactionConsumer: def __init__(self): self.consumer =", "person\") cust_list = cust.fetchall() for row in range(len(cust_list)): self.customer_list.append(row) class Transaction(Base): __tablename__ =", "self.consumer: message = message.value print('{} received'.format(message)) self.customer[message['custid']] = message # add message to", "cust_list = cust.fetchall() for row in range(len(cust_list)): self.customer_list.append(row) class Transaction(Base): __tablename__ = 'person'", "declarative_base import os user = os.getenv('MYSQL_user') pw = os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://' +", "import create_engine, Table, Column, Integer, String from sqlalchemy.ext.declarative import declarative_base import os user", "# Ledger 
is the one where all the transaction get posted self.customer =", "with engine.connect() as connection: connection.execute(\"insert into person (custid, createdate, fname, lname) values(%s, %s,", "= Column(Integer, primary_key=True) createdate = Column(Integer) fname = Column(String(50)) lname = Column(String(50)) if", "KafkaConsumer, TopicPartition from json import loads from sqlalchemy import create_engine, Table, Column, Integer,", "# Notice that each column is also a normal Python instance attribute. custid", "row in range(len(cust_list)): self.customer_list.append(row) class Transaction(Base): __tablename__ = 'person' # Here we define", "= Column(String(50)) lname = Column(String(50)) if __name__ == \"__main__\": Base.metadata.create_all(engine) c = XactionConsumer()", "dictionaries # Ledger is the one where all the transaction get posted self.customer", "in self.customer_list: print(\"Already a customer\") else: with engine.connect() as connection: connection.execute(\"insert into person", "else: with engine.connect() as connection: connection.execute(\"insert into person (custid, createdate, fname, lname) values(%s,", "'person' # Here we define columns for the table person # Notice that", "as connection: cust = connection.execute(\"select custid from person\") cust_list = cust.fetchall() for row", "#Go back to the readme. def handleMessages(self): self.CustDb() for message in self.consumer: message", "## These are two python dictionaries # Ledger is the one where all", "%s)\", (message['custid'], message['createdate'], message['fname'], message['lname'])) print(self.customer) def CustDb(self): with engine.connect() as connection: cust", "connection: cust = connection.execute(\"select custid from person\") cust_list = cust.fetchall() for row in", "self.customer_list = [] #Go back to the readme. 
def handleMessages(self): self.CustDb() for message", "get posted self.customer = {} self.customer_list = [] #Go back to the readme.", "user = os.getenv('MYSQL_user') pw = os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://' + user + ':'", "custid = Column(Integer, primary_key=True) createdate = Column(Integer) fname = Column(String(50)) lname = Column(String(50))", "with engine.connect() as connection: cust = connection.execute(\"select custid from person\") cust_list = cust.fetchall()", "where all the transaction get posted self.customer = {} self.customer_list = [] #Go", "XactionConsumer: def __init__(self): self.consumer = KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii'))) ##", "def handleMessages(self): self.CustDb() for message in self.consumer: message = message.value print('{} received'.format(message)) self.customer[message['custid']]", "= Column(Integer) fname = Column(String(50)) lname = Column(String(50)) if __name__ == \"__main__\": Base.metadata.create_all(engine)", "pw + '@localhost/ZipBank' engine = create_engine(str_sql) Base = declarative_base(bind=engine) class XactionConsumer: def __init__(self):", "kafka import KafkaConsumer, TopicPartition from json import loads from sqlalchemy import create_engine, Table,", "def CustDb(self): with engine.connect() as connection: cust = connection.execute(\"select custid from person\") cust_list", "add message to the transaction table in your SQL usinf SQLalchemy if message['custid']", "for row in range(len(cust_list)): self.customer_list.append(row) class Transaction(Base): __tablename__ = 'person' # Here we", "values(%s, %s, %s, %s)\", (message['custid'], message['createdate'], message['fname'], message['lname'])) print(self.customer) def CustDb(self): with engine.connect()", "= declarative_base(bind=engine) class XactionConsumer: def __init__(self): self.consumer = 
KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda", "os.getenv('MYSQL_user') pw = os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://' + user + ':' + pw", "os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://' + user + ':' + pw + '@localhost/ZipBank' engine", "bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii'))) ## These are two python dictionaries #", "for message in self.consumer: message = message.value print('{} received'.format(message)) self.customer[message['custid']] = message #", "cust.fetchall() for row in range(len(cust_list)): self.customer_list.append(row) class Transaction(Base): __tablename__ = 'person' # Here", "message # add message to the transaction table in your SQL usinf SQLalchemy", "from person\") cust_list = cust.fetchall() for row in range(len(cust_list)): self.customer_list.append(row) class Transaction(Base): __tablename__", "to the transaction table in your SQL usinf SQLalchemy if message['custid'] in self.customer_list:", "message.value print('{} received'.format(message)) self.customer[message['custid']] = message # add message to the transaction table", "+ ':' + pw + '@localhost/ZipBank' engine = create_engine(str_sql) Base = declarative_base(bind=engine) class", "= connection.execute(\"select custid from person\") cust_list = cust.fetchall() for row in range(len(cust_list)): self.customer_list.append(row)", "table in your SQL usinf SQLalchemy if message['custid'] in self.customer_list: print(\"Already a customer\")", "connection.execute(\"select custid from person\") cust_list = cust.fetchall() for row in range(len(cust_list)): self.customer_list.append(row) class", "self.customer_list.append(row) class Transaction(Base): __tablename__ = 'person' # Here we define columns for the", "the one where all the transaction get posted self.customer = {} self.customer_list =", "message = 
message.value print('{} received'.format(message)) self.customer[message['custid']] = message # add message to the", "createdate, fname, lname) values(%s, %s, %s, %s)\", (message['custid'], message['createdate'], message['fname'], message['lname'])) print(self.customer) def", "from sqlalchemy.ext.declarative import declarative_base import os user = os.getenv('MYSQL_user') pw = os.getenv('MYSQL') str_sql", "engine.connect() as connection: cust = connection.execute(\"select custid from person\") cust_list = cust.fetchall() for", "message in self.consumer: message = message.value print('{} received'.format(message)) self.customer[message['custid']] = message # add", "'mysql+mysqlconnector://' + user + ':' + pw + '@localhost/ZipBank' engine = create_engine(str_sql) Base", "readme. def handleMessages(self): self.CustDb() for message in self.consumer: message = message.value print('{} received'.format(message))", "[] #Go back to the readme. def handleMessages(self): self.CustDb() for message in self.consumer:", "that each column is also a normal Python instance attribute. custid = Column(Integer,", "KafkaConsumer('bank-customer-new', bootstrap_servers=['localhost:9092'], # auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii'))) ## These are two python dictionaries", "Notice that each column is also a normal Python instance attribute. 
custid =", "= 'mysql+mysqlconnector://' + user + ':' + pw + '@localhost/ZipBank' engine = create_engine(str_sql)", "+ user + ':' + pw + '@localhost/ZipBank' engine = create_engine(str_sql) Base =", "sqlalchemy.ext.declarative import declarative_base import os user = os.getenv('MYSQL_user') pw = os.getenv('MYSQL') str_sql =", "= os.getenv('MYSQL_user') pw = os.getenv('MYSQL') str_sql = 'mysql+mysqlconnector://' + user + ':' +", "print('{} received'.format(message)) self.customer[message['custid']] = message # add message to the transaction table in", "person # Notice that each column is also a normal Python instance attribute.", "self.customer_list: print(\"Already a customer\") else: with engine.connect() as connection: connection.execute(\"insert into person (custid,", "Table, Column, Integer, String from sqlalchemy.ext.declarative import declarative_base import os user = os.getenv('MYSQL_user')", "the transaction get posted self.customer = {} self.customer_list = [] #Go back to", "print(self.customer) def CustDb(self): with engine.connect() as connection: cust = connection.execute(\"select custid from person\")", "cust = connection.execute(\"select custid from person\") cust_list = cust.fetchall() for row in range(len(cust_list)):", "each column is also a normal Python instance attribute. custid = Column(Integer, primary_key=True)", "message['lname'])) print(self.customer) def CustDb(self): with engine.connect() as connection: cust = connection.execute(\"select custid from", "transaction table in your SQL usinf SQLalchemy if message['custid'] in self.customer_list: print(\"Already a", "posted self.customer = {} self.customer_list = [] #Go back to the readme. 
def", "= cust.fetchall() for row in range(len(cust_list)): self.customer_list.append(row) class Transaction(Base): __tablename__ = 'person' #", "%s, %s)\", (message['custid'], message['createdate'], message['fname'], message['lname'])) print(self.customer) def CustDb(self): with engine.connect() as connection:", "# auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii'))) ## These are two python dictionaries # Ledger", "# add message to the transaction table in your SQL usinf SQLalchemy if", "into person (custid, createdate, fname, lname) values(%s, %s, %s, %s)\", (message['custid'], message['createdate'], message['fname'],", "instance attribute. custid = Column(Integer, primary_key=True) createdate = Column(Integer) fname = Column(String(50)) lname", "= message.value print('{} received'.format(message)) self.customer[message['custid']] = message # add message to the transaction", "auto_offset_reset='earliest', value_deserializer=lambda m: loads(m.decode('ascii'))) ## These are two python dictionaries # Ledger is" ]
[ "is separate and sequential per result. # However, the words list within an", "\"rb\") as audio_file: content = audio_file.read() audio = speech.RecognitionAudio(content=content) config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16,", "speaker tag into a dictionary of word lists for i in range(nbr_of_persons): word_counter", "word_counter += 1 speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"] = words output_result[(i+1)] = speaker_data #print(output_result)", "speech_v1p1beta1 as speech import io #Set env variable, because it resets every shell", "includes all the words # from all the results thus far. Thus, to", "get all the words with speaker # tags, you only have to take", "operation to complete...\") response = client.recognize(config=config, audio=audio) # The transcript within each result", "configured correctly model=\"video\", ) print(\"Waiting for operation to complete...\") response = client.recognize(config=config, audio=audio)", "into a dictionary of word lists for i in range(nbr_of_persons): word_counter = 0", "output_result = {} #saving each word with corresponding speaker tag into a dictionary", "with open(speech_file, \"rb\") as audio_file: content = audio_file.read() audio = speech.RecognitionAudio(content=content) config =", "enable_separate_recognition_per_channel=True, #change this if respeaker is configured correctly model=\"video\", ) print(\"Waiting for operation", "words_info = result.alternatives[0].words output_result = {} #saving each word with corresponding speaker tag", "tag into a dictionary of word lists for i in range(nbr_of_persons): word_counter =", "== (i+1)): words.append(word_info.word) word_counter += 1 speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"] = words output_result[(i+1)]", "words.append(word_info.word) word_counter += 1 speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"] = words output_result[(i+1)] = 
speaker_data", "response = client.recognize(config=config, audio=audio) # The transcript within each result is separate and", "with speaker # tags, you only have to take the words list from", "word_counter = 0 speaker_data = {} words = [] for word_info in words_info:", "= [] for word_info in words_info: if(word_info.speaker_tag == (i+1)): words.append(word_info.word) word_counter += 1", "the words with speaker # tags, you only have to take the words", "Thus, to get all the words with speaker # tags, you only have", "as speech import io #Set env variable, because it resets every shell session", "it resets every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons):", "1 speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"] = words output_result[(i+1)] = speaker_data #print(output_result) return output_result", "word lists for i in range(nbr_of_persons): word_counter = 0 speaker_data = {} words", "lists for i in range(nbr_of_persons): word_counter = 0 speaker_data = {} words =", "corresponding speaker tag into a dictionary of word lists for i in range(nbr_of_persons):", "= speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change this if respeaker", "words_info: if(word_info.speaker_tag == (i+1)): words.append(word_info.word) word_counter += 1 speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"] =", "sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change this if respeaker is 
configured correctly", "channels, sample_rate, nbr_of_persons): client = speech.SpeechClient() speech_file = audio_file with open(speech_file, \"rb\") as", "speech.SpeechClient() speech_file = audio_file with open(speech_file, \"rb\") as audio_file: content = audio_file.read() audio", "speech_file = audio_file with open(speech_file, \"rb\") as audio_file: content = audio_file.read() audio =", "audio_file with open(speech_file, \"rb\") as audio_file: content = audio_file.read() audio = speech.RecognitionAudio(content=content) config", "the results thus far. Thus, to get all the words with speaker #", "= {} #saving each word with corresponding speaker tag into a dictionary of", "resets every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons): client", "the words list from the last result: result = response.results[-1] words_info = result.alternatives[0].words", "and sequential per result. 
# However, the words list within an alternative includes", "= audio_file with open(speech_file, \"rb\") as audio_file: content = audio_file.read() audio = speech.RecognitionAudio(content=content)", "print(\"Waiting for operation to complete...\") response = client.recognize(config=config, audio=audio) # The transcript within", "have to take the words list from the last result: result = response.results[-1]", "{} words = [] for word_info in words_info: if(word_info.speaker_tag == (i+1)): words.append(word_info.word) word_counter", "all the words with speaker # tags, you only have to take the", "encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change this if respeaker is configured", "if respeaker is configured correctly model=\"video\", ) print(\"Waiting for operation to complete...\") response", "of word lists for i in range(nbr_of_persons): word_counter = 0 speaker_data = {}", "google.cloud import speech_v1p1beta1 as speech import io #Set env variable, because it resets", "to get all the words with speaker # tags, you only have to", "each word with corresponding speaker tag into a dictionary of word lists for", "respeaker is configured correctly model=\"video\", ) print(\"Waiting for operation to complete...\") response =", "client.recognize(config=config, audio=audio) # The transcript within each result is separate and sequential per", "enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change this if respeaker is configured correctly model=\"video\", )", "list within an alternative includes all the words # from all the results", "io #Set env variable, because it resets every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = 
\"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\"", "for operation to complete...\") response = client.recognize(config=config, audio=audio) # The transcript within each", "with corresponding speaker tag into a dictionary of word lists for i in", "+= 1 speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"] = words output_result[(i+1)] = speaker_data #print(output_result) return", "# from all the results thus far. Thus, to get all the words", "= speech.SpeechClient() speech_file = audio_file with open(speech_file, \"rb\") as audio_file: content = audio_file.read()", "import io #Set env variable, because it resets every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] =", "(i+1)): words.append(word_info.word) word_counter += 1 speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"] = words output_result[(i+1)] =", "words with speaker # tags, you only have to take the words list", "word_info in words_info: if(word_info.speaker_tag == (i+1)): words.append(word_info.word) word_counter += 1 speaker_data[\"number_of_words\"] = word_counter", "audio_file: content = audio_file.read() audio = speech.RecognitionAudio(content=content) config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\",", "speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change this if respeaker is", "audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change this if respeaker is configured correctly model=\"video\", ) print(\"Waiting for", "# The transcript within each result is separate and sequential per result. 
#", "#saving each word with corresponding speaker tag into a dictionary of word lists", "a dictionary of word lists for i in range(nbr_of_persons): word_counter = 0 speaker_data", "= {} words = [] for word_info in words_info: if(word_info.speaker_tag == (i+1)): words.append(word_info.word)", "you only have to take the words list from the last result: result", "sample_rate, nbr_of_persons): client = speech.SpeechClient() speech_file = audio_file with open(speech_file, \"rb\") as audio_file:", "speaker # tags, you only have to take the words list from the", "The transcript within each result is separate and sequential per result. # However,", "the words list within an alternative includes all the words # from all", "to take the words list from the last result: result = response.results[-1] words_info", "# tags, you only have to take the words list from the last", "transcript within each result is separate and sequential per result. # However, the", "all the results thus far. Thus, to get all the words with speaker", "from google.cloud import speech_v1p1beta1 as speech import io #Set env variable, because it", "list from the last result: result = response.results[-1] words_info = result.alternatives[0].words output_result =", "tags, you only have to take the words list from the last result:", "result is separate and sequential per result. # However, the words list within", "in words_info: if(word_info.speaker_tag == (i+1)): words.append(word_info.word) word_counter += 1 speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"]", "sequential per result. # However, the words list within an alternative includes all", "results thus far. Thus, to get all the words with speaker # tags,", "dictionary of word lists for i in range(nbr_of_persons): word_counter = 0 speaker_data =", "words # from all the results thus far. 
Thus, to get all the", "= audio_file.read() audio = speech.RecognitionAudio(content=content) config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons,", "if(word_info.speaker_tag == (i+1)): words.append(word_info.word) word_counter += 1 speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"] = words", "= word_counter speaker_data[\"words\"] = words output_result[(i+1)] = speaker_data #print(output_result) return output_result #test #diarization_service(\"audiofiles/Test7.wav\")", "word with corresponding speaker tag into a dictionary of word lists for i", "= response.results[-1] words_info = result.alternatives[0].words output_result = {} #saving each word with corresponding", "each result is separate and sequential per result. # However, the words list", "from the last result: result = response.results[-1] words_info = result.alternatives[0].words output_result = {}", "words list within an alternative includes all the words # from all the", "complete...\") response = client.recognize(config=config, audio=audio) # The transcript within each result is separate", "to complete...\") response = client.recognize(config=config, audio=audio) # The transcript within each result is", "the last result: result = response.results[-1] words_info = result.alternatives[0].words output_result = {} #saving", "model=\"video\", ) print(\"Waiting for operation to complete...\") response = client.recognize(config=config, audio=audio) # The", "as audio_file: content = audio_file.read() audio = speech.RecognitionAudio(content=content) config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate,", "speech.RecognitionAudio(content=content) config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, 
sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change this", "audio=audio) # The transcript within each result is separate and sequential per result.", "far. Thus, to get all the words with speaker # tags, you only", "words list from the last result: result = response.results[-1] words_info = result.alternatives[0].words output_result", "is configured correctly model=\"video\", ) print(\"Waiting for operation to complete...\") response = client.recognize(config=config,", "import speech_v1p1beta1 as speech import io #Set env variable, because it resets every", "content = audio_file.read() audio = speech.RecognitionAudio(content=content) config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True,", "audio = speech.RecognitionAudio(content=content) config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True,", "per result. 
# However, the words list within an alternative includes all the", "shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons): client = speech.SpeechClient()", "result.alternatives[0].words output_result = {} #saving each word with corresponding speaker tag into a", "speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons): client = speech.SpeechClient() speech_file = audio_file with open(speech_file, \"rb\")", "this if respeaker is configured correctly model=\"video\", ) print(\"Waiting for operation to complete...\")", "nbr_of_persons): client = speech.SpeechClient() speech_file = audio_file with open(speech_file, \"rb\") as audio_file: content", "{} #saving each word with corresponding speaker tag into a dictionary of word", "all the words # from all the results thus far. Thus, to get", "0 speaker_data = {} words = [] for word_info in words_info: if(word_info.speaker_tag ==", "env variable, because it resets every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file,", "alternative includes all the words # from all the results thus far. Thus,", "#Set env variable, because it resets every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def", "\"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons): client = speech.SpeechClient() speech_file = audio_file with", "range(nbr_of_persons): word_counter = 0 speaker_data = {} words = [] for word_info in", "thus far. 
Thus, to get all the words with speaker # tags, you", "variable, because it resets every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels,", "= 0 speaker_data = {} words = [] for word_info in words_info: if(word_info.speaker_tag", "= \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons): client = speech.SpeechClient() speech_file = audio_file", "speaker_data = {} words = [] for word_info in words_info: if(word_info.speaker_tag == (i+1)):", ") print(\"Waiting for operation to complete...\") response = client.recognize(config=config, audio=audio) # The transcript", "every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons): client =", "last result: result = response.results[-1] words_info = result.alternatives[0].words output_result = {} #saving each", "within an alternative includes all the words # from all the results thus", "because it resets every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels, sample_rate,", "audio_file.read() audio = speech.RecognitionAudio(content=content) config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels,", "only have to take the words list from the last result: result =", "[] for word_info in words_info: if(word_info.speaker_tag == (i+1)): words.append(word_info.word) word_counter += 1 
speaker_data[\"number_of_words\"]", "= result.alternatives[0].words output_result = {} #saving each word with corresponding speaker tag into", "from all the results thus far. Thus, to get all the words with", "correctly model=\"video\", ) print(\"Waiting for operation to complete...\") response = client.recognize(config=config, audio=audio) #", "diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change this if respeaker is configured correctly model=\"video\", ) print(\"Waiting", "within each result is separate and sequential per result. # However, the words", "import os from google.cloud import speech_v1p1beta1 as speech import io #Set env variable,", "in range(nbr_of_persons): word_counter = 0 speaker_data = {} words = [] for word_info", "language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change this if respeaker is configured correctly model=\"video\",", "#change this if respeaker is configured correctly model=\"video\", ) print(\"Waiting for operation to", "response.results[-1] words_info = result.alternatives[0].words output_result = {} #saving each word with corresponding speaker", "an alternative includes all the words # from all the results thus far.", "i in range(nbr_of_persons): word_counter = 0 speaker_data = {} words = [] for", "result = response.results[-1] words_info = result.alternatives[0].words output_result = {} #saving each word with", "os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons): client = speech.SpeechClient() speech_file =", "separate and sequential per result. 
# However, the words list within an alternative", "However, the words list within an alternative includes all the words # from", "the words # from all the results thus far. Thus, to get all", "for word_info in words_info: if(word_info.speaker_tag == (i+1)): words.append(word_info.word) word_counter += 1 speaker_data[\"number_of_words\"] =", "result. # However, the words list within an alternative includes all the words", "= speech.RecognitionAudio(content=content) config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, enable_separate_recognition_per_channel=True, #change", "= client.recognize(config=config, audio=audio) # The transcript within each result is separate and sequential", "session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = \"/home/robin_jf_andersson/mbox_speaker_diarization/mbox1-28508a73fde1.json\" def speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons): client = speech.SpeechClient() speech_file", "os from google.cloud import speech_v1p1beta1 as speech import io #Set env variable, because", "take the words list from the last result: result = response.results[-1] words_info =", "words = [] for word_info in words_info: if(word_info.speaker_tag == (i+1)): words.append(word_info.word) word_counter +=", "for i in range(nbr_of_persons): word_counter = 0 speaker_data = {} words = []", "speaker_data[\"number_of_words\"] = word_counter speaker_data[\"words\"] = words output_result[(i+1)] = speaker_data #print(output_result) return output_result #test", "config = speech.RecognitionConfig( encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16, sample_rate_hertz=sample_rate, language_code=\"en-US\", enable_speaker_diarization=True, diarization_speaker_count=nbr_of_persons, audio_channel_count=channels, 
enable_separate_recognition_per_channel=True, #change this if", "open(speech_file, \"rb\") as audio_file: content = audio_file.read() audio = speech.RecognitionAudio(content=content) config = speech.RecognitionConfig(", "client = speech.SpeechClient() speech_file = audio_file with open(speech_file, \"rb\") as audio_file: content =", "def speaker_diarization(audio_file, channels, sample_rate, nbr_of_persons): client = speech.SpeechClient() speech_file = audio_file with open(speech_file,", "speech import io #Set env variable, because it resets every shell session os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"]", "result: result = response.results[-1] words_info = result.alternatives[0].words output_result = {} #saving each word", "# However, the words list within an alternative includes all the words #" ]
[ "self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth =", "self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel =", "CText class Prob1(Parabola): CONFIG = { 'x_min' : -5 } def construct(self): self.adjust_x_range()", "Dot() q2.set_fill(DARK_BLUE) q2.plot_depth = 1 q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\", "self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for e in [k1, k2, k1Label, k2Label]]) k1f = Line()", "summary = TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5) qf = Line() qf.add_updater(lambda", "m:\\ m.move_to(self.chord_to_directrix( p, a ))) k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k, LEFT)) pk", "k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1 = Line() p1q1.add_updater(lambda m:\\", "Dot() k2.set_fill(BLUE_E) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda", "LEFT, buff=SMALL_BUFF)) l1 = Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1, q) ))", "0)) a.plot_depth = 1 a_label = TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for e in", "class Prob3(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 } def construct(self): self.adjust_x_range()", "Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e) for e in\\ [q2, q2_label,", "m.next_to(p2, RIGHT, buff=SMALL_BUFF)) 
focus_chord = Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1),", "= Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf)) form = TexMobject('KF", "t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT + UP)) pq = Line() pq.add_updater(lambda", "self.play(*[ShowCreation(e) for e in [a, a_label]]) y_val = ValueTracker(8) m = Dot() m.set_fill(DARK_BLUE)", "buff=SMALL_BUFF)) p2q1 = Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2, q1) )) p1q2", "self.play(FadeOut(fc_def)) q_y = ValueTracker(2) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\", "m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 = Dot() q2.set_fill(DARK_BLUE) q2.plot_depth = 1", "m:\\ m.next_to(k, LEFT)) pk = Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a) ))", "import Parabola from ManimProjects.utils.geometry import CText class Prob1(Parabola): CONFIG = { 'x_min' :", "= CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT + 0.1 * DOWN) self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def))", "self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value,", "= 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\", "0 else self.coords_to_point(0, 0) )) inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT +", "= 
Dot() q2.set_fill(DARK_BLUE) q2.plot_depth = 1 q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda", "p2q2]]) k2 = Dot() k2.set_fill(DARK_BROWN) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label", "m.next_to(q2, RIGHT, buff=SMALL_BUFF)) q1q2 = Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e)", "pk = Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a) )) mp = Line()", "self.wait(10) class Prob5(Parabola): CONFIG = { 'focus': 3, 'x_min': -10 } def construct(self):", "k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1 = Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q1),", "from ManimProjects.utils.geometry import CText class Prob1(Parabola): CONFIG = { 'x_min' : -5 }", "m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -2))", "self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) q1_y = ValueTracker(9) q1 =", "= Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e) for e in\\ [q2,", "kf = Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center() )) mf = Line() mf.add_updater(lambda", "m:\\ m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2 = Line()", "self.get_opposite(p1) )) self.play(*[ShowCreation(e) for e in\\ [p2, p2_label, p1p2]]) k1 = Dot() k1.set_fill(DARK_BROWN)", "LEFT)) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ 
m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value()", "Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e", "= 1 q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT, buff=SMALL_BUFF))", ")) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for e in [k1, k2, k1Label, k2Label]])", ")) k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1)", "focus.get_center() - p.get_center() vec = normalize(vec) return focus.get_center() + 2 * vec pf.add_updater(lambda", "k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1 = Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q1),", "focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) y_val = ValueTracker(8)", "m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center() )) self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -4)) self.wait(2)", "inter.set_fill(DARK_BLUE) inter.plot_depth = 1 inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if y_val.get_value()", "self.wait(2) self.play(FadeOut(fc_def)) q_y = ValueTracker(2) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda", "TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT, buff=SMALL_BUFF)) q1q2 = Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(),", "m:\\ 
m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT, buff=SMALL_BUFF)) q1q2 = Line()", "= 1 p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) focus_chord = Line()", "k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1", "k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1 = Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q),", "1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT + UP) self.play(*[ShowCreation(e) for\\ e in [graph,", "= TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2 = Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\", "qf = Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value,", "p, p_label]]) k = Dot() k.set_fill(DARK_BLUE) k.plot_depth = 1 k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix( p,", "in [graph, directrix, focus, focusLabel]]) q1_y = ValueTracker(9) q1 = Dot() q1.set_fill(DARK_BLUE) q1.plot_depth", "RIGHT, buff=SMALL_BUFF)) q1q2 = Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e) for", "= TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT + UP) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus,", "self.right(p2, q), self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for e in", "self.play(ShowCreation(x), ShowCreation(x_label)) y_val = ValueTracker(8) p = 
Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda", "m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2 =", "self.play(ApplyMethod(y_val.set_value, 5)) summary = TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5) qf =", "ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob3(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 }", "p2_label, p1p2]]) k1 = Dot() k1.set_fill(DARK_BROWN) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1)))", "m.move_to(self.get_opposite(p1))) p2.plot_depth = 1 p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) focus_chord", "LEFT, buff=SMALL_BUFF)) p1q1 = Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q1), self.chord_to_directrix(p1, q1) ))", "focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph,", "K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5) qf = Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf))", "l.put_start_and_end_on( m.get_center(), focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf)) form = TexMobject('KF \\\\perp MF') form.scale(0.7) form.to_edge(RIGHT)", "form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af = DashedLine(a.get_center(), focus.get_center()) pf = DashedLine() def get_pf_extent(): vec", "LEFT + UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val = ValueTracker(8) p = Dot() p.set_fill(DARK_BLUE)", "p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2 = Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ 
m.move_to(self.get_opposite(p1))) p2.plot_depth", "focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf)) form = TexMobject('KF \\\\perp MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af", "1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value() ))) qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q,", "e in\\ [m, m_label, p, p_label]]) k = Dot() k.set_fill(DARK_BLUE) k.plot_depth = 1", "= Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5))", "inter = Dot() inter.set_fill(DARK_BLUE) inter.plot_depth = 1 inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value()", "Prob5(Parabola): CONFIG = { 'focus': 3, 'x_min': -10 } def construct(self): self.adjust_x_range() graph", "k2.get_center(), focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary = TexMobject('K_1F \\\\perp K_2F').scale(2)", "TexMobject('X').scale(0.5) x_label.next_to(x, LEFT + UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val = ValueTracker(8) p =", "self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1 = CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1))", "k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f = Line() k2f.add_updater(lambda", "-1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class 
Prob2(Parabola): CONFIG", "in [a, a_label]]) y_val = ValueTracker(8) m = Dot() m.set_fill(DARK_BLUE) m.plot_depth = 1", "-5 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus =", "TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2 = Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1)))", "m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1),", "focusLabel]]) a = Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0)) a.plot_depth = 1 a_label = TexMobject('A').scale(0.5)", "self.play(Write(form)) af = DashedLine(a.get_center(), focus.get_center()) pf = DashedLine() def get_pf_extent(): vec = focus.get_center()", "= Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value() ))) qLabel", "-self.focus, y_val.get_value() ))) m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m, LEFT)) p = Dot()", "q1))) k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1 = Line() p1q1.add_updater(lambda", "e in [graph, directrix, focus, focusLabel]]) h_line = self.get_horizontal() x = Dot() x.set_fill(DARK_BROWN)", "\\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3))", "x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label = TexMobject('X').scale(0.5) x_label.next_to(x, LEFT + UP) self.play(ShowCreation(h_line))", "Dot() k.set_fill(DARK_BLUE) k.plot_depth = 1 k.add_updater(lambda 
m:\\ m.move_to(self.chord_to_directrix( p, a ))) k_label =", "RIGHT, buff=SMALL_BUFF)) p2 = Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth = 1 p2Label", "q))) k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2 = Dot() k2.set_fill(BLUE_E)", "e in [graph, directrix, focus, focusLabel]]) y_val = ValueTracker(8) p1 = Dot() p1.set_color(DARK_BLUE)", "m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for e in\\ [m, m_label, p, p_label]]) k =", "pf = DashedLine() def get_pf_extent(): vec = focus.get_center() - p.get_center() vec = normalize(vec)", "k = Dot() k.set_fill(DARK_BLUE) k.plot_depth = 1 k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix( p, a )))", "pq = Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) pt =", "= 1 m.add_updater(lambda m:\\ m.move_to(self.coords_to_point( -self.focus, y_val.get_value() ))) m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\", "for e in [a, a_label]]) y_val = ValueTracker(8) m = Dot() m.set_fill(DARK_BLUE) m.plot_depth", "p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p_label = TexMobject('P').scale(0.5)", "q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) t = Dot() t.set_fill(DARK_BLUE) t.plot_depth = 1", "-y_val.get_value() ))) t = Dot() t.set_fill(DARK_BLUE) t.plot_depth = 1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()),", "!= 0 else self.coords_to_point(0, 0) )) inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT", "p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 
y_val.get_value() ))) p1.plot_depth = 1 p1Label = TexMobject('P_1').scale(0.7)", "m.put_start_and_end_on( k2.get_center(), focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary = TexMobject('K_1F \\\\perp", "focusLabel]]) y_val = ValueTracker(8) p1 = Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value()", "self.func(y_val.get_value()), y_val.get_value() ))) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point(", "p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth = 1 p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2,", "m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2 = Line() p1p2.add_updater(lambda", "RIGHT, buff=SMALL_BUFF)) p1p2 = Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(*[ShowCreation(e) for", "m.next_to(k, LEFT)) pk = Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a) )) mp", "Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center() )) mf = Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on(", "TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 = Dot() p2.set_fill(DARK_BLUE) p2.plot_depth", "q.get_center() )) self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -9)) self.wait(2)", "ShowCreation(inter_label)) self.wait() form = CText(\"P'Q经过焦点\").shift(UP) 
form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq = Line() interq.add_updater(lambda m:\\", "= Dot() inter.set_fill(DARK_BLUE) inter.plot_depth = 1 inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() )", "q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT, buff=SMALL_BUFF)) q1q2 = Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1)", "= Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\", "1 k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix( p, a ))) k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k,", "self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3),", "self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class", "Prob2(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 } def construct(self): self.adjust_x_range() graph", "p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT,", "a) )) mp = Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), p.get_center() )) self.play(*[ShowCreation(e) for", "1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) t = Dot() t.set_fill(DARK_BLUE) t.plot_depth =", "Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0)) 
a.plot_depth = 1 a_label = TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e)", "p2q1, p1q2]]) explain = CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3))", "self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob4(Parabola): CONFIG = { 'focus': 3, 'x_min':", "focus.get_center() + 2 * vec pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf))", "m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1 = Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q1), self.chord_to_directrix(p1, q1)", "self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e) for e in \\ [k1, k1_label, p1q1, p2q2]]) k2", "LEFT, buff=SMALL_BUFF)) p2q1 = Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2, q1) ))", "1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label = TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF))", "RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) y_val = ValueTracker(8) p1", ".next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait() inter = Dot() inter.set_fill(DARK_BLUE) inter.plot_depth = 1", "self.wait(2) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10)", "m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for", ")) explain = TexMobject('K_1F 
\\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5)", "m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1 = Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1, q)", "DOWN) self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def)) q_y = ValueTracker(2) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth =", ".next_to(pt, RIGHT) self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2 = CText('双纵标线').scale(0.3)\\ .next_to(t,", "p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 = Dot()", "p.get_center() vec = normalize(vec) return focus.get_center() + 2 * vec pf.add_updater(lambda m:\\ m.put_start_and_end_on(", "Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT + UP) self.play(*[ShowCreation(e)", "summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5) qf = Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1)", "p2q1, p1q2]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f =", "m:\\ m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for", "self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10)", "} def construct(self): 
self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus = Dot().move_to(self.get_focus())", "form.to_edge(RIGHT) self.play(Write(form)) af = DashedLine(a.get_center(), focus.get_center()) pf = DashedLine() def get_pf_extent(): vec =", "= self.get_horizontal() x = Dot() x.set_fill(DARK_BROWN) x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label =", "self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1, p1q2]]) explain = CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT)", "directrix = self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.7)", "k2 = Dot() k2.set_fill(DARK_BROWN) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label =", "= TexMobject('X').scale(0.5) x_label.next_to(x, LEFT + UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val = ValueTracker(8) p", "m:\\ m.next_to(p, RIGHT)) q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT)) t_label = TexMobject('T').scale(0.5)", "self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class Prob2(Parabola): CONFIG =", "self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def = CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 *", "x_label = TexMobject('X').scale(0.5) x_label.next_to(x, LEFT + UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val = ValueTracker(8)", "inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT + UP, buff=SMALL_BUFF)) px = Line()", "focus.get_center() )) k2f = Line() k2f.add_updater(lambda 
m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f))", "= ValueTracker(8) m = Dot() m.set_fill(DARK_BLUE) m.plot_depth = 1 m.add_updater(lambda m:\\ m.move_to(self.coords_to_point( -self.focus,", "[q2, q2_label, q1q2]]) p1_y = ValueTracker(2) p1 = Dot() p1.set_fill(DARK_BLUE) p1.plot_depth = 1", "focus, focusLabel]]) a = Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0)) a.plot_depth = 1 a_label =", "q2_label, q1q2]]) p1_y = ValueTracker(2) p1 = Dot() p1.set_fill(DARK_BLUE) p1.plot_depth = 1 p1.add_updater(lambda", "self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 = Dot() p2.set_fill(DARK_BLUE) p2.plot_depth = 1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label", "self.chord_to_directrix(p1, q) )) l2 = Line() l2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2, q)", "buff=SMALL_BUFF)) p1q1 = Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q1), self.chord_to_directrix(p1, q1) )) p2q2", "1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2", "px = Line() px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p, inter), x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label))", "self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class Prob5(Parabola): CONFIG = { 'focus': 3, 'x_min': -10", "q1_y.get_value() ))) q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2", "explain = CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) 
self.play(ApplyMethod(q1_y.set_value,", "self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT", "self.right(p2, q2), self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e) for e in \\ [k1, k1_label, p1q1,", "m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1 = Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2, q1)", "{ 'x_min' : -5 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix =", "1 m.add_updater(lambda m:\\ m.move_to(self.coords_to_point( -self.focus, y_val.get_value() ))) m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m,", "for e in\\ [m, m_label, p, p_label]]) k = Dot() k.set_fill(DARK_BLUE) k.plot_depth =", "buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 = Dot() q2.set_fill(DARK_BLUE) q2.plot_depth = 1 q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1)))", "p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2 = Line() p1p2.add_updater(lambda m:\\", "m_label, p, p_label]]) k = Dot() k.set_fill(DARK_BLUE) k.plot_depth = 1 k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(", "def get_pf_extent(): vec = focus.get_center() - p.get_center() vec = normalize(vec) return focus.get_center() +", "[a, a_label]]) y_val = ValueTracker(8) m = Dot() m.set_fill(DARK_BLUE) m.plot_depth = 1 m.add_updater(lambda", "LEFT)) pk = Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a) )) mp =", "from manimlib.imports import * from ManimProjects.utils.Parabola import Parabola from ManimProjects.utils.geometry import CText class", "Line() interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center() )) self.play(ShowCreation(interq)) 
self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2) self.play(ApplyMethod(y_val.set_value,", "m.put_start_and_end_on( k2.get_center(), focus.get_center() )) explain = TexMobject('K_1F \\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f))", "a.move_to(self.coords_to_point(0, 0)) a.plot_depth = 1 a_label = TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for e", "TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2 = Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(),", "= TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) y_val", "-2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class Prob5(Parabola): CONFIG = { 'focus': 3, 'x_min':", "= 1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\", "= Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT + UP)", "[k2, k2_label, p2q1, p1q2]]) explain = CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5),", "0 ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda", "p2q1 = Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2, q1) )) p1q2 =", "CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2 = 
CText('双纵标线').scale(0.3)\\", "af = DashedLine(a.get_center(), focus.get_center()) pf = DashedLine() def get_pf_extent(): vec = focus.get_center() -", "= Line() interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center() )) self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2)", "= { 'x_min' : -5 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix", "m.move_to(self.chord_to_directrix(p1, q1))) k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1 = Line()", "in\\ [k, k_label, pk, mp]]) kf = Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center()", "focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) q1_y = ValueTracker(9)", "l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1, q) )) l2 = Line() l2.add_updater(lambda m:\\", "= TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2 = Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on(", "m:\\ m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2, q1) )) p1q2 = Line() p1q2.add_updater(lambda m:\\ m.put_start_and_end_on(", "x_label.next_to(x, LEFT + UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val = ValueTracker(8) p = Dot()", "))) t = Dot() t.set_fill(DARK_BLUE) t.plot_depth = 1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0", "k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k, LEFT)) pk = Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on(", "pk, mp]]) kf = Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center() )) mf =", "0 ))) self.play(ShowCreation(p), 
ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1 = CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) self.play(ShowCreation(label1))", "self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2 = CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait() inter", "focusLabel.next_to(focus, RIGHT + UP) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) h_line", "= Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0)) a.plot_depth = 1 a_label = TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT)", "self.func(y_val.get_value()), 0 ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) q_label = TexMobject('Q').scale(0.5)", "'x_min': -10 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus", ")) l2 = Line() l2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q),", "form = CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq = Line() interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(),", "interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center() )) self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -4))", "TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1 = Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1,", "focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e)", "m:\\ m.move_to(self.coords_to_point( 
self.func(y_val.get_value()), y_val.get_value() ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e)", "q))) k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1 = Line() l1.add_updater(lambda", "+ UP, buff=SMALL_BUFF)) px = Line() px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p, inter), x.get_center() ))", "self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob4(Parabola): CONFIG = { 'focus': 3,", "m.move_to(self.chord_to_directrix(p2, q))) k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1 = Line()", "p1.set_fill(DARK_BLUE) p1.plot_depth = 1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label = TexMobject('P_1').scale(0.5)", "= TexMobject('K_1F \\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5),", "+ UP)) pq = Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() )))", "ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9))", "construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth", "= TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) focus_chord = Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on(", "m:\\ m.next_to(q, LEFT, 
buff=SMALL_BUFF)) k1 = Dot() k1.set_fill(BLUE_E) k1.plot_depth = 1 k1.add_updater(lambda m:\\", "-3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob4(Parabola): CONFIG = { 'focus':", "k2.get_center(), focus.get_center() )) explain = TexMobject('K_1F \\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3)", "l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a) )) mp = Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), p.get_center()", "q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value() )))", "DashedLine(a.get_center(), focus.get_center()) pf = DashedLine() def get_pf_extent(): vec = focus.get_center() - p.get_center() vec", "m.put_start_and_end_on( inter.get_center(), q.get_center() )) self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -4)) self.wait(2) self.play(ApplyMethod(y_val.set_value,", "= 1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF))", "l:\\ l.put_start_and_end_on( m.get_center(), p.get_center() )) self.play(*[ShowCreation(e) for e in\\ [k, k_label, pk, mp]])", "+ UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val = ValueTracker(8) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth", "if y_val.get_value() != 0 else self.coords_to_point(0, 0) )) inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\", "l:\\ l.next_to(m, LEFT)) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point(", "m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 ))) p_label = 
TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) q_label =", "self.wait(1) self.play(Write(summary)) self.wait(5) qf = Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value,", "TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT)) t_label", "p1.plot_depth = 1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda", "1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p,", "e in [graph, directrix, focus, focusLabel]]) q1_y = ValueTracker(9) q1 = Dot() q1.set_fill(DARK_BLUE)", "RIGHT)) self.play(*[ShowCreation(e) for e in\\ [m, m_label, p, p_label]]) k = Dot() k.set_fill(DARK_BLUE)", "m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for e in\\ [m, m_label, p, p_label]]) k = Dot()", "\\\\perp MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af = DashedLine(a.get_center(), focus.get_center()) pf = DashedLine() def", "= 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth", "))) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value()", "self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label))", "= Dot() m.set_fill(DARK_BLUE) m.plot_depth = 1 m.add_updater(lambda m:\\ 
m.move_to(self.coords_to_point( -self.focus, y_val.get_value() ))) m_label", "UP) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) h_line = self.get_horizontal() x", "Dot() p2.set_fill(DARK_BLUE) p2.plot_depth = 1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\", "= Line() l2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1),", "m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if y_val.get_value() != 0 else self.coords_to_point(0, 0)", "= Dot() x.set_fill(DARK_BROWN) x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label = TexMobject('X').scale(0.5) x_label.next_to(x, LEFT", "RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) q1_y = ValueTracker(9) q1", "import CText class Prob1(Parabola): CONFIG = { 'x_min' : -5 } def construct(self):", "= Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) t", "k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2,", "for\\ e in [graph, directrix, focus, focusLabel]]) y_val = ValueTracker(8) p1 = Dot()", "= TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1 = Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on(", "m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) q_label", "* RIGHT + 0.1 * DOWN) self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def)) q_y = ValueTracker(2) q", 
"self.func(-y_val.get_value()), -y_val.get_value() ))) t = Dot() t.set_fill(DARK_BLUE) t.plot_depth = 1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point(", "k.get_center(), focus.get_center() )) mf = Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center() )) self.play(ShowCreation(kf),", "t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT + UP)) pq = Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(),", "self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) y_val = ValueTracker(8) p1 =", "m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth = 1 p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF))", "self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class Prob2(Parabola): CONFIG = { 'focus': 2, 'x_min':", "TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT + UP, buff=SMALL_BUFF)) px = Line() px.add_updater(lambda m:\\", "self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def = CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT + 0.1 * DOWN)", "= TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m, LEFT)) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1", "k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2", "p1q2 = Line() p1q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q2), self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for", "class Prob5(Parabola): CONFIG = { 'focus': 3, 'x_min': -10 } def construct(self): self.adjust_x_range()", "l.next_to(m, LEFT)) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()),", "+ 0.1 * DOWN) self.play(Write(fc_def)) 
self.wait(2) self.play(FadeOut(fc_def)) q_y = ValueTracker(2) q = Dot()", "p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) q = Dot() q.set_fill(DARK_BLUE)", "in [graph, directrix, focus, focusLabel]]) y_val = ValueTracker(8) p1 = Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda", "self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT)", "k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value,", "self.play(*[ShowCreation(e) for e in\\ [m, m_label, p, p_label]]) k = Dot() k.set_fill(DARK_BLUE) k.plot_depth", "k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(),", "self.right(p1, q), self.chord_to_directrix(p1, q) )) l2 = Line() l2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q),", "q) )) l2 = Line() l2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2, q) ))", "self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def = CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT +", "m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2 = Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth = 1", "buff=SMALL_BUFF)) k1 = Dot() k1.set_fill(BLUE_E) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label", "m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1 = Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q1), 
self.chord_to_directrix(p1,", "-9)) self.wait(10) class Prob3(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 } def", "p1q2]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f = Line()", "self.chord_to_directrix(p1, q1) )) p2q2 = Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2, q2)", "y_val = ValueTracker(8) m = Dot() m.set_fill(DARK_BLUE) m.plot_depth = 1 m.add_updater(lambda m:\\ m.move_to(self.coords_to_point(", "buff=SMALL_BUFF)) q1q2 = Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e) for e", "self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2 = CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait()", "inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT + UP, buff=SMALL_BUFF)) px = Line() px.add_updater(lambda m:\\ m.put_start_and_end_on(", "Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q1), self.chord_to_directrix(p1, q1) )) p2q2 = Line() p2q2.add_updater(lambda", "p1.get_center(), self.get_opposite(p1) )) self.play(*[ShowCreation(e) for e in\\ [p2, p2_label, p1p2]]) k1 = Dot()", "ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob4(Parabola): CONFIG = {", "k1.set_fill(BLUE_E) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\", "'x_min' : -5 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix()", "k2.set_fill(DARK_BROWN) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label = 
TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\", "Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), p.get_center() )) self.play(*[ShowCreation(e) for e in\\ [k, k_label,", "0.1 * DOWN) self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def)) q_y = ValueTracker(2) q = Dot() q.set_fill(DARK_BLUE)", "focus, focusLabel]]) q1_y = ValueTracker(9) q1 = Dot() q1.set_fill(DARK_BLUE) q1.plot_depth = 1 q1.add_updater(lambda", "self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2 = CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2))", "m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3),", ")) self.play(*[ShowCreation(e) for e in \\ [k1, k1_label, p1q1, p2q2]]) k2 = Dot()", "ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for e in [k1, k2, k1Label, k2Label]]) k1f =", "m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2 = Dot() k2.set_fill(BLUE_E) k2.plot_depth = 1 k2.add_updater(lambda m:\\", "p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q1), self.chord_to_directrix(p1, q1) )) p2q2 = Line() p2q2.add_updater(lambda m:\\", "= 1 focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix,", "-3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob3(Parabola): CONFIG = { 'focus':", "= focus.get_center() - p.get_center() vec = normalize(vec) return focus.get_center() + 2 * vec", "for e in \\ [k2, k2_label, p2q1, p1q2]]) k1f = Line() k1f.add_updater(lambda m:\\", "p_label]]) k = Dot() 
k.set_fill(DARK_BLUE) k.plot_depth = 1 k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix( p, a", "x.move_to(self.coords_to_point(-self.focus, 0)) x_label = TexMobject('X').scale(0.5) x_label.next_to(x, LEFT + UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val", "Dot() inter.set_fill(DARK_BLUE) inter.plot_depth = 1 inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if", "ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class Prob5(Parabola):", "self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob3(Parabola): CONFIG = { 'focus': 2,", "e in [graph, directrix, focus, focusLabel]]) a = Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0)) a.plot_depth", "q_y.get_value() ))) qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q, LEFT, buff=SMALL_BUFF)) k1 = Dot()", "for\\ e in [graph, directrix, focus, focusLabel]]) h_line = self.get_horizontal() x = Dot()", "inter.get_center(), q.get_center() )) self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -9))", "import * from ManimProjects.utils.Parabola import Parabola from ManimProjects.utils.geometry import CText class Prob1(Parabola): CONFIG", "k2_label = TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1 = Line() p2q1.add_updater(lambda m:\\", "self.play(*[ShowCreation(e) for e in [k1, k2, k1Label, k2Label]]) k1f = Line() k1f.add_updater(lambda m:\\", "RIGHT + 0.1 * DOWN) self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def)) q_y = 
ValueTracker(2) q =", "= Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\", "p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e) for e in \\", "m.next_to(q, LEFT, buff=SMALL_BUFF)) k1 = Dot() k1.set_fill(BLUE_E) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1,", "m:\\ m.put_start_and_end_on( self.right(p1, q2), self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for e in \\ [k2,", "self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2 = CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2))", "l2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e)", "= Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center() )) mf = Line() mf.add_updater(lambda l:\\", "= 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) t = Dot() t.set_fill(DARK_BLUE) t.plot_depth", "p1q1, p2q2]]) k2 = Dot() k2.set_fill(DARK_BROWN) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1)))", "mp = Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), p.get_center() )) self.play(*[ShowCreation(e) for e in\\", "= Dot() p1.set_fill(DARK_BLUE) p1.plot_depth = 1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label", "self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def = 
CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT + 0.1 *", "self.func(y_val.get_value()), y_val.get_value() ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for e", "LEFT, buff=SMALL_BUFF)) k1 = Dot() k1.set_fill(BLUE_E) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q)))", "k1.set_fill(DARK_BROWN) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\", "Line() p1q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q2), self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for e in", "explain = TexMobject('K_1F \\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value,", "q1), self.chord_to_directrix(p2, q1) )) p1q2 = Line() p1q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q2), self.chord_to_directrix(p1,", "[k1, k2, k1Label, k2Label]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() ))", "self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for e in [k1, k2, k1Label, k2Label]]) k1f", "m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2 = Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth =", "p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for e in\\ [m, m_label,", "get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -8))", "class 
Prob1(Parabola): CONFIG = { 'x_min' : -5 } def construct(self): self.adjust_x_range() graph", "ValueTracker(2) p1 = Dot() p1.set_fill(DARK_BLUE) p1.plot_depth = 1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value()", "p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1", "focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT + UP) self.play(*[ShowCreation(e) for\\ e", "m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def = CText('焦点弦') fc_def.move_to(focus_chord.get_center())", "k1Label, k2Label]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f =", "e in\\ [q2, q2_label, q1q2]]) p1_y = ValueTracker(2) p1 = Dot() p1.set_fill(DARK_BLUE) p1.plot_depth", "focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT + UP) self.play(*[ShowCreation(e) for\\ e in [graph, directrix,", ")) self.play(*[ShowCreation(e) for e in\\ [k, k_label, pk, mp]]) kf = Line() kf.add_updater(lambda", "= Dot() k1.set_fill(DARK_BROWN) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label = TexMobject('K_1').scale(0.5)", ")) inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT + UP, buff=SMALL_BUFF)) px =", "DashedLine() def get_pf_extent(): vec = focus.get_center() - p.get_center() vec = normalize(vec) return focus.get_center()", "directrix = self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5)", "focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 
focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e)", "for e in\\ [p2, p2_label, p1p2]]) k1 = Dot() k1.set_fill(DARK_BROWN) k1.plot_depth = 1", "[p2, p2_label, p1p2]]) k1 = Dot() k1.set_fill(DARK_BROWN) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1,", "l.put_start_and_end_on( m.get_center(), p.get_center() )) self.play(*[ShowCreation(e) for e in\\ [k, k_label, pk, mp]]) kf", "Dot() k1.set_fill(BLUE_E) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda", "self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1, p1q2]]) k1f = Line() k1f.add_updater(lambda", "CONFIG = { 'focus': 2, 'x_min': -4 } def construct(self): self.adjust_x_range() graph =", "self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) a = Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0,", "self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label))", "p1q1 = Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q1), self.chord_to_directrix(p1, q1) )) p2q2 =", "= Dot() k1.set_fill(BLUE_E) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label = TexMobject('K_1').scale(0.7)", "x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form = CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq", "in [k1, k2, k1Label, k2Label]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center()", "= 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix,", 
"k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1", "RIGHT)) t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT + UP)) pq = Line()", "\\ [k2, k2_label, p2q1, p1q2]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center()", "m.next_to(inter, LEFT + UP, buff=SMALL_BUFF)) px = Line() px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p, inter),", "m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value,", "k2, k1Label, k2Label]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f", "RIGHT)) q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT)) t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\", "label2 = CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait() inter = Dot() inter.set_fill(DARK_BLUE)", "m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for e", "q1))) k2_label = TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1 = Line() p2q1.add_updater(lambda", "= Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2, q1) )) p1q2 = Line()", "= TexMobject('KF \\\\perp MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af = DashedLine(a.get_center(), focus.get_center()) pf 
=", "qLabel.add_updater(lambda m:\\ m.next_to(q, LEFT, buff=SMALL_BUFF)) k1 = Dot() k1.set_fill(BLUE_E) k1.plot_depth = 1 k1.add_updater(lambda", "mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf)) form = TexMobject('KF \\\\perp MF')", ")) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary = TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1)", "5)) summary = TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5) qf = Line()", "Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2, q1) )) p1q2 = Line() p1q2.add_updater(lambda", "y_val.get_value() ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for e in\\", "))) qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q, LEFT, buff=SMALL_BUFF)) k1 = Dot() k1.set_fill(BLUE_E)", "= 1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label = TexMobject('X').scale(0.5) x_label.next_to(x, LEFT + UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x),", "self.play(*[ShowCreation(e) for e in\\ [q2, q2_label, q1q2]]) p1_y = ValueTracker(2) p1 = Dot()", "[k2, k2_label, p2q1, p1q2]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() ))", "self.play(ShowCreation(q), ShowCreation(q_label)) label2 = CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait() inter =", "m.add_updater(lambda m:\\ m.move_to(self.coords_to_point( -self.focus, y_val.get_value() ))) m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m, LEFT))", "in [graph, directrix, focus, focusLabel]]) a = Dot() a.set_fill(DARK_BROWN) 
a.move_to(self.coords_to_point(0, 0)) a.plot_depth =", "self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form = CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq = Line() interq.add_updater(lambda", "Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p_label =", "focus.get_center()) pf = DashedLine() def get_pf_extent(): vec = focus.get_center() - p.get_center() vec =", "CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT + 0.1 * DOWN) self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def)) q_y", "self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class", "= Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) pt = Line()", "TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) q1_y =", "p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2, q1) )) p1q2 = Line() p1q2.add_updater(lambda m:\\", "form.to_edge(RIGHT) self.play(Write(form)) interq = Line() interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center() )) self.play(ShowCreation(interq)) self.wait(2)", "y_val.get_value() != 0 else self.coords_to_point(0, 0) )) inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter,", "e in\\ [k, k_label, pk, mp]]) kf = Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(),", "k1.get_center(), focus.get_center() )) k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), 
focus.get_center() )) self.play(ShowCreation(k1f),", "k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) explain = TexMobject('K_1F \\\\perp", "m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1", "a = Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0)) a.plot_depth = 1 a_label = TexMobject('A').scale(0.5) a_label.next_to(a,", "= Dot() p2.set_fill(DARK_BLUE) p2.plot_depth = 1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda", "p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() )))", "q) )) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for e in [k1, k2, k1Label,", "q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT)) t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT + UP))", "p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(*[ShowCreation(e) for e in\\ [p2, p2_label, p1p2]])", "2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class Prob5(Parabola): CONFIG = {", "0) )) inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT + UP, buff=SMALL_BUFF)) px", "\\ [k1, k1_label, p1q1, p2q2]]) k2 = Dot() k2.set_fill(DARK_BROWN) k2.plot_depth = 1 k2.add_updater(lambda", "RIGHT + UP) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) h_line =", "k2_label, p2q1, p1q2]]) explain = CText('这些交点在准线上').scale(0.3) 
explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value,", "- p.get_center() vec = normalize(vec) return focus.get_center() + 2 * vec pf.add_updater(lambda m:\\", "ValueTracker(9) q1 = Dot() q1.set_fill(DARK_BLUE) q1.plot_depth = 1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value()", "self.func(y_val.get_value()), y_val.get_value() ))) p1.plot_depth = 1 p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT,", "m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1 = Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2,", "m.next_to(t, RIGHT + UP)) pq = Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()),", "p.get_center(), get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3) self.play(ApplyMethod(y_val.set_value,", "'focus': 2, 'x_min': -4 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix =", "2, 'x_min': -4 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix()", "= TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) a", "p.get_center() )) self.play(*[ShowCreation(e) for e in\\ [k, k_label, pk, mp]]) kf = Line()", "for\\ e in [graph, directrix, focus, focusLabel]]) q1_y = ValueTracker(9) q1 = Dot()", "m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e) for e in\\ [q2, q2_label, q1q2]]) p1_y", "p1q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q2), 
self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for e in \\", "m.move_to(self.chord_to_directrix( p, a ))) k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k, LEFT)) pk =", "m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if y_val.get_value() != 0 else self.coords_to_point(0, 0) ))", "focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def =", "ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary = TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5)", "focus_chord = Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2),", "Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1, q) )) l2 = Line() l2.add_updater(lambda", "= ValueTracker(2) p1 = Dot() p1.set_fill(DARK_BLUE) p1.plot_depth = 1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()),", "* DOWN) self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def)) q_y = ValueTracker(2) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth", "Line() pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t),", "m.move_to(self.coords_to_point( -self.focus, y_val.get_value() ))) m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m, LEFT)) p =", ")) p1q2 = Line() p1q2.add_updater(lambda m:\\ 
m.put_start_and_end_on( self.right(p1, q2), self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e)", "qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value,", "in \\ [k2, k2_label, p2q1, p1q2]]) explain = CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5)", "\\ [k2, k2_label, p2q1, p1q2]]) explain = CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value,", "+ UP) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) h_line = self.get_horizontal()", "self.chord_to_directrix(p, a) )) mp = Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), p.get_center() )) self.play(*[ShowCreation(e)", "= Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) explain = TexMobject('K_1F \\\\perp K_2F')", "y_val = ValueTracker(8) p1 = Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() )))", "UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val = ValueTracker(8) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth =", "q1), self.chord_to_directrix(p1, q1) )) p2q2 = Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2,", "self.func(q_y.get_value()), q_y.get_value() ))) qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q, LEFT, buff=SMALL_BUFF)) k1 =", "TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2 = Dot() k2.set_fill(BLUE_E) k2.plot_depth = 1", "ShowCreation(t_label)) label1 = 
CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label))", "p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q,", "k_label.add_updater(lambda m:\\ m.next_to(k, LEFT)) pk = Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a)", "= Dot() k2.set_fill(DARK_BROWN) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label = TexMobject('K_2').scale(0.5)", "k_label, pk, mp]]) kf = Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center() )) mf", "= 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT,", "= Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) q", "self.wait() form = CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq = Line() interq.add_updater(lambda m:\\ m.put_start_and_end_on(", "x.set_fill(DARK_BROWN) x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label = TexMobject('X').scale(0.5) x_label.next_to(x, LEFT + UP)", "focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in", "focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT +", "m.move_to(self.chord_to_directrix(p2, q1))) k2_label = TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1 
= Line()", "p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth = 1 p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT,", "ShowCreation(p2Label)) fc_def = CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT + 0.1 * DOWN) self.play(Write(fc_def))", "self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class Prob2(Parabola):", "= ValueTracker(8) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()),", "m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT, buff=SMALL_BUFF)) q1q2 = Line() q1q2.add_updater(lambda", "Prob3(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 } def construct(self): self.adjust_x_range() graph", "1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label = TexMobject('X').scale(0.5) x_label.next_to(x, LEFT + UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label))", "p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def = CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2", "q2.set_fill(DARK_BLUE) q2.plot_depth = 1 q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2,", "p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda", "m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1, q) )) l2 = Line() l2.add_updater(lambda m:\\ 
m.put_start_and_end_on( self.right(p2,", "1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1,", "= Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q1), self.chord_to_directrix(p1, q1) )) p2q2 = Line()", "m:\\ m.put_start_and_end_on( self.right(p1, q1), self.chord_to_directrix(p1, q1) )) p2q2 = Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on(", "2 * vec pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value,", "focus.get_center() )) mf = Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf))", "self.wait(5) qf = Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1)", "= 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT,", "k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary =", "CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq = Line() interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center() ))", "p2q2 = Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e) for", "p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 = Dot() p2.set_fill(DARK_BLUE) 
p2.plot_depth =", "p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) q = Dot()", "return focus.get_center() + 2 * vec pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent() )) self.play(ShowCreation(af),", "4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if y_val.get_value() != 0 else self.coords_to_point(0, 0) )) inter_label =", "k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix( p, a ))) k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k, LEFT))", "m.plot_depth = 1 m.add_updater(lambda m:\\ m.move_to(self.coords_to_point( -self.focus, y_val.get_value() ))) m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda", "y_val.get_value() ))) m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m, LEFT)) p = Dot() p.set_fill(DARK_BLUE)", "RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 = Dot() q2.set_fill(DARK_BLUE) q2.plot_depth = 1 q2.add_updater(lambda m:\\", "k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1,", "3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class Prob2(Parabola): CONFIG = { 'focus': 2, 'x_min': -4", "normalize(vec) return focus.get_center() + 2 * vec pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent() ))", "m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) pt = Line() pt.add_updater(lambda m:\\ m.put_start_and_end_on(", "{ 'focus': 2, 'x_min': -4 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix", "Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) 
self.play(*[ShowCreation(e) for e in\\ [p2, p2_label,", "[graph, directrix, focus, focusLabel]]) q1_y = ValueTracker(9) q1 = Dot() q1.set_fill(DARK_BLUE) q1.plot_depth =", "= Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5))", "self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class Prob5(Parabola): CONFIG = { 'focus':", "))) p1.plot_depth = 1 p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2", "m.put_start_and_end_on( self.right(p1, q2), self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for e in \\ [k2, k2_label,", "px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p, inter), x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form =", "for\\ e in [graph, directrix, focus, focusLabel]]) a = Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0))", "fc_def = CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT + 0.1 * DOWN) self.play(Write(fc_def)) self.wait(2)", ")) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10)", "Dot() p1.set_fill(DARK_BLUE) p1.plot_depth = 1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label =", "k2_label, p2q1, p1q2]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f", "k1.get_center(), focus.get_center() )) k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) explain", "q1q2 = 
Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e) for e in\\", "TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for e in [a, a_label]]) y_val = ValueTracker(8) m", "self.play(*[ShowCreation(e) for e in\\ [k, k_label, pk, mp]]) kf = Line() kf.add_updater(lambda l:\\", "Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) pt = Line() pt.add_updater(lambda", "= Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p_label", ")) mf = Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf)) form", ")) self.play(*[ShowCreation(e) for e in\\ [q2, q2_label, q1q2]]) p1_y = ValueTracker(2) p1 =", "0)) x_label = TexMobject('X').scale(0.5) x_label.next_to(x, LEFT + UP) self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val =", "m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1 =", "q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT, buff=SMALL_BUFF)) q1q2 =", "directrix, focus, focusLabel]]) a = Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0)) a.plot_depth = 1 a_label", "Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a) )) mp = Line() mp.add_updater(lambda l:\\", "focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]])", "for e in [k1, k2, k1Label, k2Label]]) k1f = Line() 
k1f.add_updater(lambda m:\\ m.put_start_and_end_on(", "focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph,", "[graph, directrix, focus, focusLabel]]) h_line = self.get_horizontal() x = Dot() x.set_fill(DARK_BROWN) x.plot_depth =", "= Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a) )) mp = Line() mp.add_updater(lambda", "self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10)", "= 1 k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix( p, a ))) k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\", "= CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq = Line() interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center()", "-4 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus =", "m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 = Dot() q2.set_fill(DARK_BLUE) q2.plot_depth = 1 q2.add_updater(lambda", "Prob1(Parabola): CONFIG = { 'x_min' : -5 } def construct(self): self.adjust_x_range() graph =", "))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\", "UP, buff=SMALL_BUFF)) px = Line() px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p, inter), x.get_center() )) self.play(ShowCreation(px))", "m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) focus_chord = Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) ))", "1 q2.add_updater(lambda m:\\ 
m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT, buff=SMALL_BUFF)) q1q2", "Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e) for e in", "q2) )) self.play(*[ShowCreation(e) for e in \\ [k1, k1_label, p1q1, p2q2]]) k2 =", "= TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for e in [a, a_label]]) y_val = ValueTracker(8)", "self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def)) q_y = ValueTracker(2) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1", "q_y = ValueTracker(2) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point(", "Dot() m.set_fill(DARK_BLUE) m.plot_depth = 1 m.add_updater(lambda m:\\ m.move_to(self.coords_to_point( -self.focus, y_val.get_value() ))) m_label =", "self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1 = CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt)", "= CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2 =", "l1 = Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1, q) )) l2 =", ")) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form = CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq =", "q2) )) self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1, p1q2]]) k1f =", "m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 = Dot() p2.set_fill(DARK_BLUE) p2.plot_depth = 1 
p2.add_updater(lambda", ")) p2q2 = Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e)", ") if y_val.get_value() != 0 else self.coords_to_point(0, 0) )) inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda", "3, 'x_min': -10 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix()", "'x_min': -4 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus", "l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf)) form = TexMobject('KF \\\\perp MF') form.scale(0.7)", "Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p1.plot_depth = 1 p1Label =", "self.wait(10) class Prob3(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 } def construct(self):", "Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth = 1 p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\", "m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) t = Dot() t.set_fill(DARK_BLUE) t.plot_depth = 1 t.add_updater(lambda m:\\", "self.play(Write(form)) interq = Line() interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center() )) self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value,", "m = Dot() m.set_fill(DARK_BLUE) m.plot_depth = 1 m.add_updater(lambda m:\\ m.move_to(self.coords_to_point( -self.focus, y_val.get_value() )))", "in\\ [p2, p2_label, p1p2]]) k1 = Dot() k1.set_fill(DARK_BROWN) k1.plot_depth = 1 k1.add_updater(lambda m:\\", "= Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f = Line() k2f.add_updater(lambda 
m:\\", "t = Dot() t.set_fill(DARK_BLUE) t.plot_depth = 1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 )))", "p1 = Dot() p1.set_fill(DARK_BLUE) p1.plot_depth = 1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() )))", "self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form = CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq = Line()", "= self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus,", "= TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1 = Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on(", "))) q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 =", "ShowCreation(mf)) form = TexMobject('KF \\\\perp MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af = DashedLine(a.get_center(), focus.get_center())", "= TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT, buff=SMALL_BUFF)) q1q2 = Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on(", "[graph, directrix, focus, focusLabel]]) y_val = ValueTracker(8) p1 = Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\", "self.play(*[ShowCreation(e) for e in\\ [p2, p2_label, p1p2]]) k1 = Dot() k1.set_fill(DARK_BROWN) k1.plot_depth =", "ManimProjects.utils.Parabola import Parabola from ManimProjects.utils.geometry import CText class Prob1(Parabola): CONFIG = { 'x_min'", "= ValueTracker(8) p1 = Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p1.plot_depth", "m.move_to(self.coords_to_point( 
self.func(q_y.get_value()), q_y.get_value() ))) qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q, LEFT, buff=SMALL_BUFF)) k1", "q2) )) self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1, p1q2]]) explain =", "RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 = Dot() p2.set_fill(DARK_BLUE) p2.plot_depth = 1 p2.add_updater(lambda m:\\", "= Line() pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt))", "e in \\ [k2, k2_label, p2q1, p1q2]]) explain = CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain))", "q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) t = Dot() t.set_fill(DARK_BLUE)", "1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1,", "[graph, directrix, focus, focusLabel]]) a = Dot() a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0)) a.plot_depth = 1", "h_line = self.get_horizontal() x = Dot() x.set_fill(DARK_BROWN) x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label", "q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() )))", "q), self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for e in [k1,", "= self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus,", "buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 = 
Dot() p2.set_fill(DARK_BLUE) p2.plot_depth = 1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1)))", ")) k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) explain = TexMobject('K_1F", "= 1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\", "q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT, buff=SMALL_BUFF)) q1q2 = Line() q1q2.add_updater(lambda m:\\", "y_val.get_value() ))) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()),", "self.coords_to_point(0, 0) )) inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT + UP, buff=SMALL_BUFF))", "e in [a, a_label]]) y_val = ValueTracker(8) m = Dot() m.set_fill(DARK_BLUE) m.plot_depth =", "self.get_opposite(q1) )) self.play(*[ShowCreation(e) for e in\\ [q2, q2_label, q1q2]]) p1_y = ValueTracker(2) p1", "def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN)", "* from ManimProjects.utils.Parabola import Parabola from ManimProjects.utils.geometry import CText class Prob1(Parabola): CONFIG =", "kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center() )) mf = Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(),", "self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class Prob2(Parabola): CONFIG = { 'focus': 2,", "self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1, p1q2]]) k1f", "m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) 
pt = Line() pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(),", "self.play(Write(summary)) self.wait(5) qf = Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1))", "= 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label = TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT,", "a_label]]) y_val = ValueTracker(8) m = Dot() m.set_fill(DARK_BLUE) m.plot_depth = 1 m.add_updater(lambda m:\\", "Line() px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p, inter), x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form", "= ValueTracker(9) q1 = Dot() q1.set_fill(DARK_BLUE) q1.plot_depth = 1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()),", "k1 = Dot() k1.set_fill(DARK_BROWN) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label =", "Line() l2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2))", "p.get_center(), self.chord_to_directrix(p, a) )) mp = Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), p.get_center() ))", "= 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT,", "= 1 a_label = TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for e in [a, a_label]])", "vec pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3)", "m:\\ m.move_to(self.coords_to_point( 
self.func(y_val.get_value()), y_val.get_value() ))) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda", "k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label = TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1", "m:\\ m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e) for e in \\ [k1,", "focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5))", "3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob4(Parabola): CONFIG = { 'focus': 3, 'x_min': -10", "mf = Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf)) form =", "mp]]) kf = Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center() )) mf = Line()", "TexMobject('K_1F \\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value,", "self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob3(Parabola): CONFIG = { 'focus': 2, 'x_min':", "pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a) )) mp = Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on(", "self.play(ShowCreation(kf), ShowCreation(mf)) form = TexMobject('KF \\\\perp MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af = DashedLine(a.get_center(),", "q1) )) p2q2 = Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2, q2) ))", "= DashedLine(a.get_center(), 
focus.get_center()) pf = DashedLine() def get_pf_extent(): vec = focus.get_center() - p.get_center()", "q1.set_fill(DARK_BLUE) q1.plot_depth = 1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label = TexMobject('Q_1').scale(0.5)", "pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label))", "m.get_center(), focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf)) form = TexMobject('KF \\\\perp MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form))", "q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value() ))) qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda", "m:\\ m.next_to(q, RIGHT)) t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT + UP)) pq", "q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e) for e in\\ [q2, q2_label, q1q2]])", "TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) y_val =", "= 1 p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2 = Dot()", "p2.plot_depth = 1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT,", "= Dot() t.set_fill(DARK_BLUE) t.plot_depth = 1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 ))) p_label", "p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) focus_chord = Line() focus_chord.add_updater(lambda m:\\", "k2 = Dot() 
k2.set_fill(BLUE_E) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label =", "inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if y_val.get_value() != 0 else self.coords_to_point(0,", "in [graph, directrix, focus, focusLabel]]) h_line = self.get_horizontal() x = Dot() x.set_fill(DARK_BROWN) x.plot_depth", "self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob4(Parabola):", "= Dot() k2.set_fill(BLUE_E) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label = TexMobject('K_2').scale(0.7)", "= TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 = Dot() p2.set_fill(DARK_BLUE)", "MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af = DashedLine(a.get_center(), focus.get_center()) pf = DashedLine() def get_pf_extent():", "= self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel", "a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for e in [a, a_label]]) y_val = ValueTracker(8) m =", "focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT + UP) self.play(*[ShowCreation(e) for\\", "self.play(ShowCreation(h_line)) self.play(ShowCreation(x), ShowCreation(x_label)) y_val = ValueTracker(8) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1", "p1 = Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p1.plot_depth = 1", "ApplyMethod(p1_y.set_value, -3)) self.wait(3) 
self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob3(Parabola): CONFIG = {", "m_label.add_updater(lambda l:\\ l.next_to(m, LEFT)) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\", "p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) focus_chord = Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1)", "Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) t =", "m.put_start_and_end_on( p.get_center(), get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3)", "= Line() px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p, inter), x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait()", "m.put_start_and_end_on( self.right(p, inter), x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form = CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5)", "m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e) for e in \\ [k1, k1_label,", "m:\\ m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF))", ")) self.play(ShowCreation(kf), ShowCreation(mf)) form = TexMobject('KF \\\\perp MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af =", "= CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait() inter = Dot() inter.set_fill(DARK_BLUE) inter.plot_depth", "Prob4(Parabola): CONFIG = { 'focus': 3, 
'x_min': -10 } def construct(self): self.adjust_x_range() graph", "= normalize(vec) return focus.get_center() + 2 * vec pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent()", "-9)) self.wait(10) class Prob4(Parabola): CONFIG = { 'focus': 3, 'x_min': -10 } def", "m:\\ m.move_to(self.coords_to_point( -self.focus, y_val.get_value() ))) m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m, LEFT)) p", "RIGHT + UP)) pq = Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value()", "k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1,", "m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p1.plot_depth = 1 p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1,", "= Dot() k.set_fill(DARK_BLUE) k.plot_depth = 1 k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix( p, a ))) k_label", "p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT))", "Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center())) self.play(ShowCreation(qf)) self.wait(1) self.play(ApplyMethod(q_y.set_value, -1)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1)", "= TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2 = Dot() k2.set_fill(BLUE_E) k2.plot_depth =", "t.plot_depth = 1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda", "form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form)) interq = Line() 
interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center() )) self.play(ShowCreation(interq))", "p2 = Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth = 1 p2Label = TexMobject('P_2').scale(0.7)", "q1_y = ValueTracker(9) q1 = Dot() q1.set_fill(DARK_BLUE) q1.plot_depth = 1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point(", "= Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q2), self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e) for e", "class Prob4(Parabola): CONFIG = { 'focus': 3, 'x_min': -10 } def construct(self): self.adjust_x_range()", "-10 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus =", "self.wait() inter = Dot() inter.set_fill(DARK_BLUE) inter.plot_depth = 1 inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2),", "a.set_fill(DARK_BROWN) a.move_to(self.coords_to_point(0, 0)) a.plot_depth = 1 a_label = TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for", ")) mp = Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), p.get_center() )) self.play(*[ShowCreation(e) for e", "self.play(ApplyMethod(y_val.set_value, 0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class Prob2(Parabola): CONFIG = {", "m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def = CText('焦点弦')", "m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1 = Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1,", "1 focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e 
in [graph, directrix, focus,", "k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1 = Line() l1.add_updater(lambda m:\\", "a ))) k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k, LEFT)) pk = Line() pk.add_updater(lambda", "RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait() inter = Dot() inter.set_fill(DARK_BLUE) inter.plot_depth = 1 inter.add_updater(lambda", "m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF))", "m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center()", "0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob3(Parabola): CONFIG =", "= TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for e in\\ [m, m_label, p,", "TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for e in\\ [m, m_label, p, p_label]])", "m:\\ m.next_to(inter, LEFT + UP, buff=SMALL_BUFF)) px = Line() px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p,", "m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2 = Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) ))", "manimlib.imports import * from ManimProjects.utils.Parabola import Parabola from ManimProjects.utils.geometry import CText class Prob1(Parabola):", "= TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT))", "self.play(ApplyMethod(y_val.set_value, 2)) 
self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class Prob5(Parabola): CONFIG =", "class Prob2(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 } def construct(self): self.adjust_x_range()", ")) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def = CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT", "vec = normalize(vec) return focus.get_center() + 2 * vec pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(),", "m.put_start_and_end_on( self.right(p2, q1), self.chord_to_directrix(p2, q1) )) p1q2 = Line() p1q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1,", "interq = Line() interq.add_updater(lambda m:\\ m.put_start_and_end_on( inter.get_center(), q.get_center() )) self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 4))", "m.next_to(p, RIGHT)) q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT)) t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda", "Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary", "ValueTracker(2) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value()", "self.wait(10) class Prob4(Parabola): CONFIG = { 'focus': 3, 'x_min': -10 } def construct(self):", "focus, focusLabel]]) y_val = ValueTracker(8) p1 = Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()),", "= TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT + UP, buff=SMALL_BUFF)) px = Line() px.add_updater(lambda", ")) 
self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -9)) self.wait(2) self.play(ApplyMethod(y_val.set_value,", "k2.set_fill(BLUE_E) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\", "= TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 = Dot() q2.set_fill(DARK_BLUE)", "self.right(p1, q1), self.chord_to_directrix(p1, q1) )) p2q2 = Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q2),", "buff=SMALL_BUFF)) px = Line() px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p, inter), x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter),", "inter.plot_depth = 1 inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if y_val.get_value() !=", "m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2 = Dot() k2.set_fill(BLUE_E) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2,", "ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class Prob2(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 }", "Parabola from ManimProjects.utils.geometry import CText class Prob1(Parabola): CONFIG = { 'x_min' : -5", "p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2 =", "p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1 = CText('纵标线').scale(0.3)\\", "ApplyMethod(p1_y.set_value, -9)) self.wait(10) class 
Prob4(Parabola): CONFIG = { 'focus': 3, 'x_min': -10 }", "l.put_start_and_end_on( k.get_center(), focus.get_center() )) mf = Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center() ))", "m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1 =", "focus.get_center() )) k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) explain =", "ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1 = CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq))", "= Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p1.plot_depth = 1 p1Label", "k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2 = Dot() k2.set_fill(BLUE_E) k2.plot_depth", "1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus,", "p1p2 = Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(*[ShowCreation(e) for e in\\", "q1) )) p1q2 = Line() p1q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q2), self.chord_to_directrix(p1, q2) ))", "for e in \\ [k1, k1_label, p1q1, p2q2]]) k2 = Dot() k2.set_fill(DARK_BROWN) k2.plot_depth", "q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value() ))) qLabel = TexMobject('Q').scale(0.7)", "CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5) 
self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value,", "= Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1, q) )) l2 = Line()", "p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p1.plot_depth = 1 p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda", "p2 = Dot() p2.set_fill(DARK_BLUE) p2.plot_depth = 1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5)", "Dot() k2.set_fill(DARK_BROWN) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label = TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda", ")) self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1, p1q2]]) k1f = Line()", "label1 = CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2", "0.5)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 3), ApplyMethod(q_y.set_value, 0.5)) self.wait(10) class Prob2(Parabola): CONFIG = { 'focus':", "ShowCreation(q_label)) label2 = CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait() inter = Dot()", "Dot() x.set_fill(DARK_BROWN) x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label = TexMobject('X').scale(0.5) x_label.next_to(x, LEFT +", "TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT)) t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT +", ")) self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1, p1q2]]) explain = CText('这些交点在准线上').scale(0.3)", "p1q2]]) explain = CText('这些交点在准线上').scale(0.3) 
explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3)", "1 p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2 = Dot() p2.set_color(DARK_BLUE)", "TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5) qf = Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(),", "= TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5) qf = Line() qf.add_updater(lambda m:\\", "k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label = TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2,", "for e in \\ [k2, k2_label, p2q1, p1q2]]) explain = CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT) self.wait(2)", "self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) pt = Line() pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()),", "ManimProjects.utils.geometry import CText class Prob1(Parabola): CONFIG = { 'x_min' : -5 } def", "l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center() )) mf = Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center()", "'focus': 3, 'x_min': -10 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix =", "RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) a = Dot() a.set_fill(DARK_BROWN)", "1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF))", "0.5)) self.wait(10) class Prob2(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 } def", "in \\ [k1, k1_label, 
p1q1, p2q2]]) k2 = Dot() k2.set_fill(DARK_BROWN) k2.plot_depth = 1", "self.right(p2, q1), self.chord_to_directrix(p2, q1) )) p1q2 = Line() p1q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q2),", "self.chord_to_directrix(p2, q1) )) p1q2 = Line() p1q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q2), self.chord_to_directrix(p1, q2)", "m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary = TexMobject('K_1F", "m:\\ m.move_to(self.chord_to_directrix(p2, q1))) k2_label = TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1 =", "self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary = TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary))", "t.set_fill(DARK_BLUE) t.plot_depth = 1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 ))) p_label = TexMobject('P').scale(0.5)", "in\\ [m, m_label, p, p_label]]) k = Dot() k.set_fill(DARK_BLUE) k.plot_depth = 1 k.add_updater(lambda", "else self.coords_to_point(0, 0) )) inter_label = TexMobject(\"P'\").scale(0.5) inter_label.add_updater(lambda m:\\ m.next_to(inter, LEFT + UP,", "))) self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1 = CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) self.play(ShowCreation(label1)) self.wait()", "from ManimProjects.utils.Parabola import Parabola from ManimProjects.utils.geometry import CText class Prob1(Parabola): CONFIG = {", "focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) a = Dot()", "self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) 
self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class Prob5(Parabola): CONFIG", "self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2 = CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait()", "in \\ [k2, k2_label, p2q1, p1q2]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(),", "focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]])", "LEFT, buff=SMALL_BUFF)) k2 = Dot() k2.set_fill(BLUE_E) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q)))", "TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k, LEFT)) pk = Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p,", "RIGHT) self.play(*[ShowCreation(e) for e in [a, a_label]]) y_val = ValueTracker(8) m = Dot()", "q2), self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1, p1q2]])", "[k, k_label, pk, mp]]) kf = Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on( k.get_center(), focus.get_center() ))", "m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m, LEFT)) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth =", "focusLabel]]) q1_y = ValueTracker(9) q1 = Dot() q1.set_fill(DARK_BLUE) q1.plot_depth = 1 q1.add_updater(lambda m:\\", "a.plot_depth = 1 a_label = TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for e in [a,", "q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) t = Dot()", "m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value() ))) qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q, 
LEFT, buff=SMALL_BUFF))", "p2_label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2 = Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1)", "m.set_fill(DARK_BLUE) m.plot_depth = 1 m.add_updater(lambda m:\\ m.move_to(self.coords_to_point( -self.focus, y_val.get_value() ))) m_label = TexMobject('M').scale(0.5)", "self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1 = CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT)", "Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value() ))) qLabel =", "q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value() ))) qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q, LEFT,", "1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p,", "form = TexMobject('KF \\\\perp MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af = DashedLine(a.get_center(), focus.get_center()) pf", "))) p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 =", "pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3) self.play(ApplyMethod(y_val.set_value,", "ShowCreation(x_label)) y_val = ValueTracker(8) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\", "focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in", 
"1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth =", "{ 'focus': 3, 'x_min': -10 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix", "buff=SMALL_BUFF)) k2 = Dot() k2.set_fill(BLUE_E) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label", "))) k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k, LEFT)) pk = Line() pk.add_updater(lambda l:\\", "[k1, k1_label, p1q1, p2q2]]) k2 = Dot() k2.set_fill(DARK_BROWN) k2.plot_depth = 1 k2.add_updater(lambda m:\\", "TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q, LEFT, buff=SMALL_BUFF)) k1 = Dot() k1.set_fill(BLUE_E) k1.plot_depth = 1", "m.move_to(self.chord_to_directrix(p1, q))) k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2 = Dot()", "TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 = Dot() q2.set_fill(DARK_BLUE) q2.plot_depth", "m.put_start_and_end_on( self.right(p1, q1), self.chord_to_directrix(p1, q1) )) p2q2 = Line() p2q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2,", "m:\\ m.next_to(t, RIGHT + UP)) pq = Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point(", "pt = Line() pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p), ShowCreation(p_label))", "q2 = Dot() q2.set_fill(DARK_BLUE) q2.plot_depth = 1 q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5)", "ShowCreation(p1_label)) p2 = Dot() p2.set_fill(DARK_BLUE) p2.plot_depth = 1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label =", 
"3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob3(Parabola): CONFIG = { 'focus': 2, 'x_min': -4", "Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) q =", "p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) pt = Line() pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point(", "q1q2]]) p1_y = ValueTracker(2) p1 = Dot() p1.set_fill(DARK_BLUE) p1.plot_depth = 1 p1.add_updater(lambda m:\\", "m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p1.plot_depth = 1 p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\", "e in \\ [k2, k2_label, p2q1, p1q2]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on(", "TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) focus_chord = Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(),", "Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e", "1 a_label = TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for e in [a, a_label]]) y_val", "= TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1 = Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on(", "* vec pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2))", "self.coords_to_point( self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p), ShowCreation(p_label)) self.play(ShowCreation(pt)) self.play(ShowCreation(t), ShowCreation(t_label)) label1 = CText('纵标线').scale(0.3)\\ .next_to(pt,", "k1 = 
Dot() k1.set_fill(BLUE_E) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label =", "get_pf_extent(): vec = focus.get_center() - p.get_center() vec = normalize(vec) return focus.get_center() + 2", ": -5 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus", "p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2 = Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda", "= Line() p1q2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q2), self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for e", "m:\\ m.put_start_and_end_on( self.right(p, inter), x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form = CText(\"P'Q经过焦点\").shift(UP)", "TexMobject('K_2').scale(0.5) k2_label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) p2q1 = Line() p2q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2,", "self.wait(10) class Prob2(Parabola): CONFIG = { 'focus': 2, 'x_min': -4 } def construct(self):", "self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1 focusLabel = TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT)", "p2.plot_depth = 1 p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) focus_chord =", "self.play(*[ShowCreation(e) for e in \\ [k1, k1_label, p1q1, p2q2]]) k2 = Dot() k2.set_fill(DARK_BROWN)", "= CText('这些交点在准线上').scale(0.3) explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3),", "self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) 
self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob3(Parabola):", "= TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k, LEFT)) pk = Line() pk.add_updater(lambda l:\\ l.put_start_and_end_on( p.get_center(),", "m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF)) l1 =", "q2.plot_depth = 1 q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1))) q2_label = TexMobject('Q_2').scale(0.5) q2_label.add_updater(lambda m:\\ m.next_to(q2, RIGHT,", "ValueTracker(8) m = Dot() m.set_fill(DARK_BLUE) m.plot_depth = 1 m.add_updater(lambda m:\\ m.move_to(self.coords_to_point( -self.focus, y_val.get_value()", "TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT + UP)) pq = Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on(", "self.wait() self.play(FadeOut(label2)) self.wait() inter = Dot() inter.set_fill(DARK_BLUE) inter.plot_depth = 1 inter.add_updater(lambda m:\\ m.move_to(", "self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if y_val.get_value() != 0 else self.coords_to_point(0, 0) )) inter_label", "Line() mf.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), focus.get_center() )) self.play(ShowCreation(kf), ShowCreation(mf)) form = TexMobject('KF \\\\perp", "))) m_label = TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m, LEFT)) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth", "inter), x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form = CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT) self.play(Write(form))", "directrix, focus, focusLabel]]) h_line = self.get_horizontal() x = Dot() x.set_fill(DARK_BROWN) x.plot_depth = 1", "self.func(-y_val.get_value()), -y_val.get_value() ))) pt = Line() 
pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0", "1 inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if y_val.get_value() != 0 else", "ValueTracker(8) p1 = Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) p1.plot_depth =", "p, a ))) k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda m:\\ m.next_to(k, LEFT)) pk = Line()", "t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT))", "= TexMobject('F').scale(0.7) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) q1_y", "ShowCreation(q1_label)) q2 = Dot() q2.set_fill(DARK_BLUE) q2.plot_depth = 1 q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1))) q2_label =", "ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def = CText('焦点弦') fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT + 0.1", "focus, focusLabel]]) h_line = self.get_horizontal() x = Dot() x.set_fill(DARK_BROWN) x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus,", "RIGHT) self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q), ShowCreation(q_label)) label2 = CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN)", "self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait() inter = Dot() inter.set_fill(DARK_BLUE) inter.plot_depth = 1 inter.add_updater(lambda m:\\", "self.right(p, inter), x.get_center() )) self.play(ShowCreation(px)) self.play(ShowCreation(inter), ShowCreation(inter_label)) self.wait() form = 
CText(\"P'Q经过焦点\").shift(UP) form.scale(0.5) form.to_edge(RIGHT)", "TexMobject('KF \\\\perp MF') form.scale(0.7) form.to_edge(RIGHT) self.play(Write(form)) af = DashedLine(a.get_center(), focus.get_center()) pf = DashedLine()", "self.get_horizontal() x = Dot() x.set_fill(DARK_BROWN) x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label = TexMobject('X').scale(0.5)", "self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class Prob5(Parabola): CONFIG = { 'focus': 3, 'x_min': -10 }", "q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e) for e in\\ [q2, q2_label, q1q2]]) p1_y = ValueTracker(2)", "1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q))) k1Label = TexMobject('K_1').scale(0.7) k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF))", "self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob3(Parabola): CONFIG", "e in [k1, k2, k1Label, k2Label]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(),", "m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(*[ShowCreation(e) for e in\\ [p2, p2_label, p1p2]]) k1 =", "l2 = Line() l2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q), ShowCreation(qLabel))", "+ 2 * vec pf.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), get_pf_extent() )) self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3)", "for e in\\ [q2, q2_label, q1q2]]) p1_y = ValueTracker(2) p1 = Dot() p1.set_fill(DARK_BLUE)", "= TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT + UP)) pq = Line() pq.add_updater(lambda m:\\", "k.set_fill(DARK_BLUE) k.plot_depth = 1 k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix( p, a ))) k_label = TexMobject('K').scale(0.5)", "x = Dot() 
x.set_fill(DARK_BROWN) x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus, 0)) x_label = TexMobject('X').scale(0.5) x_label.next_to(x,", "e in \\ [k1, k1_label, p1q1, p2q2]]) k2 = Dot() k2.set_fill(DARK_BROWN) k2.plot_depth =", "= Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label))", "q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT,", "LEFT + UP, buff=SMALL_BUFF)) px = Line() px.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p, inter), x.get_center()", "m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2 = Dot() p2.set_fill(DARK_BLUE) p2.plot_depth = 1", "-y_val.get_value() ))) pt = Line() pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0 )))", "p1.plot_depth = 1 p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) p2 =", "self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) h_line = self.get_horizontal() x =", "q), self.chord_to_directrix(p1, q) )) l2 = Line() l2.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p2, q), self.chord_to_directrix(p2,", "))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for e in\\ [m,", "m.next_to(q, RIGHT)) t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT + UP)) pq =", "self.play(ShowCreation(interq)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -4)) self.wait(2) 
self.play(ApplyMethod(y_val.set_value, -9)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 9))", "fc_def.move_to(focus_chord.get_center()) fc_def.shift(0.2 * RIGHT + 0.1 * DOWN) self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def)) q_y =", "= Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), p.get_center() )) self.play(*[ShowCreation(e) for e in\\ [k,", "K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3)", "= ValueTracker(2) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()),", "Dot() k1.set_fill(DARK_BROWN) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label = TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda", "TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT + UP) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]])", "self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob4(Parabola): CONFIG", "Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on(", "Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord)) self.play(ShowCreation(p2), ShowCreation(p2Label)) fc_def", "in\\ [q2, q2_label, q1q2]]) p1_y = ValueTracker(2) p1 = Dot() p1.set_fill(DARK_BLUE) p1.plot_depth =", "= { 'focus': 3, 'x_min': -10 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN)", "= Dot() 
p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth = 1 p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda", "= 1 focusLabel = TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT + UP) self.play(*[ShowCreation(e) for\\ e in", ")) self.play(*[ShowCreation(e) for e in\\ [p2, p2_label, p1p2]]) k1 = Dot() k1.set_fill(DARK_BROWN) k1.plot_depth", "RIGHT, buff=SMALL_BUFF)) focus_chord = Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label))", "= 1 inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point( 4*(self.focus**3)/(y_val.get_value()**2), 4*self.focus**2/y_val.get_value() ) if y_val.get_value() != 0", "\\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5) qf = Line() qf.add_updater(lambda m:\\ m.put_start_and_end_on(q.get_center(), focus.get_center()))", "self.play(ShowCreation(af), ShowCreation(pf)) self.wait(3) self.play(ApplyMethod(y_val.set_value, 2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class", "q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 = Dot()", "Dot() q1.set_fill(DARK_BLUE) q1.plot_depth = 1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label =", "m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value() ))) q = Dot() q.set_fill(DARK_BLUE) q.plot_depth = 1 q.add_updater(lambda m:\\", "k1Label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) k2 = Dot() k2.set_fill(BLUE_E) k2.plot_depth = 1 k2.add_updater(lambda", "directrix, focus, focusLabel]]) q1_y = ValueTracker(9) q1 = Dot() q1.set_fill(DARK_BLUE) q1.plot_depth = 1", 
"buff=SMALL_BUFF)) p2 = Dot() p2.set_color(DARK_BLUE) p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2.plot_depth = 1 p2Label =", "1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2, q))) k2Label = TexMobject('K_2').scale(0.7) k2Label.add_updater(lambda m:\\ m.next_to(k2, LEFT, buff=SMALL_BUFF))", "m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) )) self.play(*[ShowCreation(e) for e in\\ [q2, q2_label, q1q2]]) p1_y =", "Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) explain = TexMobject('K_1F \\\\perp K_2F') explain.to_edge(RIGHT)", "q1 = Dot() q1.set_fill(DARK_BLUE) q1.plot_depth = 1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() )))", "focus.get_center() )) explain = TexMobject('K_1F \\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain))", "m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(*[ShowCreation(e) for e in\\ [p2, p2_label, p1p2]]) k1", "p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) self.play(*[ShowCreation(e) for e in\\ [m, m_label, p, p_label]]) k", "self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1, p1q2]]) explain", "buff=SMALL_BUFF)) l1 = Line() l1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1, q) )) l2", "TexMobject('K_1').scale(0.5) k1_label.add_updater(lambda m:\\ m.next_to(k1, LEFT, buff=SMALL_BUFF)) p1q1 = Line() p1q1.add_updater(lambda m:\\ m.put_start_and_end_on( self.right(p1,", "explain.to_edge(RIGHT) self.wait(2) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9))", "CONFIG = { 'focus': 3, 'x_min': -10 } def construct(self): 
self.adjust_x_range() graph =", "0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value, 3), ApplyMethod(p1_y.set_value, -9)) self.wait(10) class Prob4(Parabola): CONFIG =", "graph = self.get_graph(color=LIGHT_BROWN) directrix = self.get_directrix() focus = Dot().move_to(self.get_focus()) focus.set_fill(DARK_BROWN) focus.plot_depth = 1", "= TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT)) t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t, RIGHT", "m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) explain = TexMobject('K_1F \\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f),", "buff=SMALL_BUFF)) focus_chord = Line() focus_chord.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(ShowCreation(p1), ShowCreation(p1Label)) self.play(ShowCreation(focus_chord))", "self.wait(2) self.play(ApplyMethod(y_val.set_value, 4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -4)) self.wait(2) self.play(ApplyMethod(y_val.set_value, -9)) self.wait(2) self.play(ApplyMethod(y_val.set_value, 9)) self.wait(10)", "y_val.get_value() ))) p1.plot_depth = 1 p1Label = TexMobject('P_1').scale(0.7) p1Label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF))", "fc_def.shift(0.2 * RIGHT + 0.1 * DOWN) self.play(Write(fc_def)) self.wait(2) self.play(FadeOut(fc_def)) q_y = ValueTracker(2)", "= 1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 ))) p_label = TexMobject('P').scale(0.5) p_label.add_updater(lambda m:\\", "-8)) self.wait(10) class Prob5(Parabola): CONFIG = { 'focus': 3, 'x_min': -10 } def", "[m, m_label, p, p_label]]) k = Dot() k.set_fill(DARK_BLUE) k.plot_depth = 1 k.add_updater(lambda m:\\", "Dot() t.set_fill(DARK_BLUE) t.plot_depth = 1 t.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), 0 ))) p_label =", 
"m:\\ m.move_to(self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) t = Dot() t.set_fill(DARK_BLUE) t.plot_depth = 1 t.add_updater(lambda", "focus.get_center() )) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary = TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT)", "l:\\ l.put_start_and_end_on( p.get_center(), self.chord_to_directrix(p, a) )) mp = Line() mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(),", "y_val = ValueTracker(8) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point(", "ShowCreation(l2)) self.play(*[ShowCreation(e) for e in [k1, k2, k1Label, k2Label]]) k1f = Line() k1f.add_updater(lambda", "TexMobject('F').scale(0.5) focusLabel.next_to(focus, RIGHT) self.play(*[ShowCreation(e) for\\ e in [graph, directrix, focus, focusLabel]]) a =", "self.play(ApplyMethod(y_val.set_value, -2)) self.wait(3) self.play(ApplyMethod(y_val.set_value, -8)) self.wait(10) class Prob5(Parabola): CONFIG = { 'focus': 3,", "self.wait(1) self.play(ApplyMethod(y_val.set_value, 5)) summary = TexMobject('K_1F \\\\perp K_2F').scale(2) summary.to_edge(RIGHT) self.wait(1) self.play(Write(summary)) self.wait(5) qf", "qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q, LEFT, buff=SMALL_BUFF)) k1 = Dot() k1.set_fill(BLUE_E) k1.plot_depth", "= TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\ m.next_to(q, LEFT, buff=SMALL_BUFF)) k1 = Dot() k1.set_fill(BLUE_E) k1.plot_depth =", "= Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(*[ShowCreation(e) for e in\\ [p2,", "k2Label]]) k1f = Line() k1f.add_updater(lambda m:\\ m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f = Line()", "self.play(ShowCreation(t), ShowCreation(t_label)) label1 = CText('纵标线').scale(0.3)\\ .next_to(pt, RIGHT) 
self.play(ShowCreation(label1)) self.wait() self.play(FadeOut(label1)) self.play(ShowCreation(pq)) self.remove(pt) self.play(ShowCreation(q),", "1 p2Label = TexMobject('P_2').scale(0.7) p2Label.add_updater(lambda m:\\ m.next_to(p2, RIGHT, buff=SMALL_BUFF)) focus_chord = Line() focus_chord.add_updater(lambda", "4*self.focus**2/y_val.get_value() ) if y_val.get_value() != 0 else self.coords_to_point(0, 0) )) inter_label = TexMobject(\"P'\").scale(0.5)", "p2.set_fill(DARK_BLUE) p2.plot_depth = 1 p2.add_updater(lambda m:\\ m.move_to(self.get_opposite(p1))) p2_label = TexMobject('P_2').scale(0.5) p2_label.add_updater(lambda m:\\ m.next_to(p2,", "directrix, focus, focusLabel]]) y_val = ValueTracker(8) p1 = Dot() p1.set_color(DARK_BLUE) p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point(", "m.next_to(p2, RIGHT, buff=SMALL_BUFF)) p1p2 = Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(*[ShowCreation(e)", "a_label = TexMobject('A').scale(0.5) a_label.next_to(a, RIGHT) self.play(*[ShowCreation(e) for e in [a, a_label]]) y_val =", "buff=SMALL_BUFF)) p1p2 = Line() p1p2.add_updater(lambda m:\\ m.put_start_and_end_on( p1.get_center(), self.get_opposite(p1) )) self.play(*[ShowCreation(e) for e", "= 1 q.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q_y.get_value()), q_y.get_value() ))) qLabel = TexMobject('Q').scale(0.7) qLabel.add_updater(lambda m:\\", "))) pt = Line() pt.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(y_val.get_value()), 0 ))) self.play(ShowCreation(p),", "q1_label.add_updater(lambda m:\\ m.next_to(q1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 = Dot() q2.set_fill(DARK_BLUE) q2.plot_depth =", "self.play(ShowCreation(q1), ShowCreation(q1_label)) q2 = Dot() q2.set_fill(DARK_BLUE) q2.plot_depth = 1 q2.add_updater(lambda m:\\ m.move_to(self.get_opposite(q1))) q2_label", "for e in\\ [k, k_label, 
pk, mp]]) kf = Line() kf.add_updater(lambda l:\\ l.put_start_and_end_on(", "explain.to_edge(RIGHT) self.wait(2) self.play(ShowCreation(k1f), ShowCreation(k2f)) self.wait(3) self.play(Write(explain)) self.wait(5) self.play(ApplyMethod(q1_y.set_value, 0.5), ApplyMethod(p1_y.set_value, -3)) self.wait(3) self.play(ApplyMethod(q1_y.set_value,", "k.plot_depth = 1 k.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix( p, a ))) k_label = TexMobject('K').scale(0.5) k_label.add_updater(lambda", "m:\\ m.put_start_and_end_on( self.right(p1, q), self.chord_to_directrix(p1, q) )) l2 = Line() l2.add_updater(lambda m:\\ m.put_start_and_end_on(", "self.chord_to_directrix(p2, q) )) self.play(ShowCreation(q), ShowCreation(qLabel)) self.play(ShowCreation(l1), ShowCreation(l2)) self.play(*[ShowCreation(e) for e in [k1, k2,", "ValueTracker(8) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(y_val.get_value()), y_val.get_value()", "self.right(p1, q2), self.chord_to_directrix(p1, q2) )) self.play(*[ShowCreation(e) for e in \\ [k2, k2_label, p2q1,", "k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() )) explain = TexMobject('K_1F \\\\perp K_2F') explain.to_edge(RIGHT) self.wait(2)", "k1_label, p1q1, p2q2]]) k2 = Dot() k2.set_fill(DARK_BROWN) k2.plot_depth = 1 k2.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p2,", "m.move_to(self.coords_to_point( self.func(p1_y.get_value()), p1_y.get_value() ))) p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1),", "p_label.add_updater(lambda m:\\ m.next_to(p, RIGHT)) q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT)) t_label =", "UP)) pq = Line() pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) pt", "vec = 
focus.get_center() - p.get_center() vec = normalize(vec) return focus.get_center() + 2 *", "mp.add_updater(lambda l:\\ l.put_start_and_end_on( m.get_center(), p.get_center() )) self.play(*[ShowCreation(e) for e in\\ [k, k_label, pk,", "m.get_center(), p.get_center() )) self.play(*[ShowCreation(e) for e in\\ [k, k_label, pk, mp]]) kf =", "p1p2]]) k1 = Dot() k1.set_fill(DARK_BROWN) k1.plot_depth = 1 k1.add_updater(lambda m:\\ m.move_to(self.chord_to_directrix(p1, q1))) k1_label", "= DashedLine() def get_pf_extent(): vec = focus.get_center() - p.get_center() vec = normalize(vec) return", "e in\\ [p2, p2_label, p1p2]]) k1 = Dot() k1.set_fill(DARK_BROWN) k1.plot_depth = 1 k1.add_updater(lambda", "self.play(FadeOut(label2)) self.wait() inter = Dot() inter.set_fill(DARK_BLUE) inter.plot_depth = 1 inter.add_updater(lambda m:\\ m.move_to( self.coords_to_point(", "= { 'focus': 2, 'x_min': -4 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN)", "q_label = TexMobject('Q').scale(0.5) q_label.add_updater(lambda m:\\ m.next_to(q, RIGHT)) t_label = TexMobject('T').scale(0.5) t_label.add_updater(lambda m:\\ m.next_to(t,", "CText('双纵标线').scale(0.3)\\ .next_to(t, RIGHT+DOWN) self.play(ShowCreation(label2)) self.wait() self.play(FadeOut(label2)) self.wait() inter = Dot() inter.set_fill(DARK_BLUE) inter.plot_depth =", "focusLabel]]) h_line = self.get_horizontal() x = Dot() x.set_fill(DARK_BROWN) x.plot_depth = 1 x.move_to(self.coords_to_point(-self.focus, 0))", "TexMobject('M').scale(0.5) m_label.add_updater(lambda l:\\ l.next_to(m, LEFT)) p = Dot() p.set_fill(DARK_BLUE) p.plot_depth = 1 p.add_updater(lambda", "m.put_start_and_end_on( k1.get_center(), focus.get_center() )) k2f = Line() k2f.add_updater(lambda m:\\ m.put_start_and_end_on( k2.get_center(), focus.get_center() ))", "p1_y = ValueTracker(2) p1 = Dot() p1.set_fill(DARK_BLUE) p1.plot_depth = 1 p1.add_updater(lambda m:\\ m.move_to(self.coords_to_point(", "m:\\ m.next_to(q2, RIGHT, 
buff=SMALL_BUFF)) q1q2 = Line() q1q2.add_updater(lambda m:\\ m.put_start_and_end_on( q1.get_center(), self.get_opposite(q1) ))", "q1.plot_depth = 1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label = TexMobject('Q_1').scale(0.5) q1_label.add_updater(lambda", "= Dot() q1.set_fill(DARK_BLUE) q1.plot_depth = 1 q1.add_updater(lambda m:\\ m.move_to(self.coords_to_point( self.func(q1_y.get_value()), q1_y.get_value() ))) q1_label", "CONFIG = { 'x_min' : -5 } def construct(self): self.adjust_x_range() graph = self.get_graph(color=LIGHT_BROWN)", "pq.add_updater(lambda m:\\ m.put_start_and_end_on( p.get_center(), self.coords_to_point( self.func(-y_val.get_value()), -y_val.get_value() ))) pt = Line() pt.add_updater(lambda m:\\", "q2), self.chord_to_directrix(p2, q2) )) self.play(*[ShowCreation(e) for e in \\ [k1, k1_label, p1q1, p2q2]])", "p1_y.get_value() ))) p1_label = TexMobject('P_1').scale(0.5) p1_label.add_updater(lambda m:\\ m.next_to(p1, RIGHT, buff=SMALL_BUFF)) self.play(ShowCreation(p1), ShowCreation(p1_label)) p2" ]
[ "'__main__': a = NameSpace('http://www.google.com/person/') b = a.to_uri() @rdf_prefix class Node: name: str email:", "__getattr__(self, name) -> rdflib.URIRef: return super(NameSpace, self).__getattr__(name) def uid(self, name) -> rdflib.URIRef: \"\"\"", "return cls if __name__ == '__main__': a = NameSpace('http://www.google.com/person/') b = a.to_uri() @rdf_prefix", "name) -> rdflib.URIRef: \"\"\" 以 uuid 生成一个唯一 id 来作为 value 包装成 URIRef :return:", "name_to_uri: name_to_uri[name] = str(uuid.uuid1()) uri = name_to_uri[name] return rdflib.URIRef(self[uri]) def to_uri(self) -> rdflib.URIRef:", "pyfuseki import config import uuid name_to_uri = dict() class NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib", "attrs: setattr(cls, k, NameSpace(local_prefix + k + '/')) return cls if __name__ ==", "if __name__ == '__main__': a = NameSpace('http://www.google.com/person/') b = a.to_uri() @rdf_prefix class Node:", "= cls.__annotations__.keys() for k in attrs: setattr(cls, k, NameSpace(local_prefix + k + '/'))", "并扩充其他相关的功能 \"\"\" def __getitem__(self, key) -> rdflib.URIRef: return super(NameSpace, self).__getitem__(key) def __getattr__(self, name)", "local_prefix: str = None): if local_prefix is None: local_prefix = config.COMMON_PREFIX attrs =", "def to_uri(self) -> rdflib.URIRef: \"\"\" 将自身转换成 URIRef :return: \"\"\" uri = str(self) if", "cls.__annotations__.keys() for k in attrs: setattr(cls, k, NameSpace(local_prefix + k + '/')) return", "class NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib 的 Namespace 并扩充其他相关的功能 \"\"\" def __getitem__(self, key) ->", "not in name_to_uri: name_to_uri[name] = str(uuid.uuid1()) uri = name_to_uri[name] return rdflib.URIRef(self[uri]) def to_uri(self)", "= uri[:uri.rfind('/')] return rdflib.URIRef(uri) def rdf_prefix(cls: type, local_prefix: str = None): if local_prefix", "is None: local_prefix = config.COMMON_PREFIX attrs = cls.__annotations__.keys() for k in attrs: setattr(cls,", "k, NameSpace(local_prefix + k + '/')) return cls if __name__ == 
'__main__': a", "cls if __name__ == '__main__': a = NameSpace('http://www.google.com/person/') b = a.to_uri() @rdf_prefix class", "= NameSpace('http://www.google.com/person/') b = a.to_uri() @rdf_prefix class Node: name: str email: str n", "uri = str(self) if uri.endswith('/'): uri = uri[:uri.rfind('/')] return rdflib.URIRef(uri) def rdf_prefix(cls: type,", "-> rdflib.URIRef: return super(NameSpace, self).__getitem__(key) def __getattr__(self, name) -> rdflib.URIRef: return super(NameSpace, self).__getattr__(name)", "uid(self, name) -> rdflib.URIRef: \"\"\" 以 uuid 生成一个唯一 id 来作为 value 包装成 URIRef", "def __getattr__(self, name) -> rdflib.URIRef: return super(NameSpace, self).__getattr__(name) def uid(self, name) -> rdflib.URIRef:", "uri = uri[:uri.rfind('/')] return rdflib.URIRef(uri) def rdf_prefix(cls: type, local_prefix: str = None): if", "\"\"\" uri = str(self) if uri.endswith('/'): uri = uri[:uri.rfind('/')] return rdflib.URIRef(uri) def rdf_prefix(cls:", "local_prefix is None: local_prefix = config.COMMON_PREFIX attrs = cls.__annotations__.keys() for k in attrs:", "URIRef :return: \"\"\" if name not in name_to_uri: name_to_uri[name] = str(uuid.uuid1()) uri =", "NameSpace(local_prefix + k + '/')) return cls if __name__ == '__main__': a =", "来作为 value 包装成 URIRef :return: \"\"\" if name not in name_to_uri: name_to_uri[name] =", "if name not in name_to_uri: name_to_uri[name] = str(uuid.uuid1()) uri = name_to_uri[name] return rdflib.URIRef(self[uri])", "+ k + '/')) return cls if __name__ == '__main__': a = NameSpace('http://www.google.com/person/')", "\"\"\" def __getitem__(self, key) -> rdflib.URIRef: return super(NameSpace, self).__getitem__(key) def __getattr__(self, name) ->", "uri.endswith('/'): uri = uri[:uri.rfind('/')] return rdflib.URIRef(uri) def rdf_prefix(cls: type, local_prefix: str = None):", "的 Namespace 并扩充其他相关的功能 \"\"\" def __getitem__(self, key) -> rdflib.URIRef: return super(NameSpace, self).__getitem__(key) def", "return super(NameSpace, 
self).__getattr__(name) def uid(self, name) -> rdflib.URIRef: \"\"\" 以 uuid 生成一个唯一 id", "13:04 @Author: @File: rf_prefix.py \"\"\" import rdflib from pyfuseki import config import uuid", "rdflib.URIRef: \"\"\" 以 uuid 生成一个唯一 id 来作为 value 包装成 URIRef :return: \"\"\" if", "None: local_prefix = config.COMMON_PREFIX attrs = cls.__annotations__.keys() for k in attrs: setattr(cls, k,", "import config import uuid name_to_uri = dict() class NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib 的", "= dict() class NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib 的 Namespace 并扩充其他相关的功能 \"\"\" def __getitem__(self,", "uri[:uri.rfind('/')] return rdflib.URIRef(uri) def rdf_prefix(cls: type, local_prefix: str = None): if local_prefix is", "from pyfuseki import config import uuid name_to_uri = dict() class NameSpace(rdflib.Namespace): \"\"\" 继承", "id 来作为 value 包装成 URIRef :return: \"\"\" if name not in name_to_uri: name_to_uri[name]", "以 uuid 生成一个唯一 id 来作为 value 包装成 URIRef :return: \"\"\" if name not", "return super(NameSpace, self).__getitem__(key) def __getattr__(self, name) -> rdflib.URIRef: return super(NameSpace, self).__getattr__(name) def uid(self,", "NameSpace('http://www.google.com/person/') b = a.to_uri() @rdf_prefix class Node: name: str email: str n =", "name_to_uri[name] = str(uuid.uuid1()) uri = name_to_uri[name] return rdflib.URIRef(self[uri]) def to_uri(self) -> rdflib.URIRef: \"\"\"", "@Time: 2021/9/18 13:04 @Author: @File: rf_prefix.py \"\"\" import rdflib from pyfuseki import config", "super(NameSpace, self).__getattr__(name) def uid(self, name) -> rdflib.URIRef: \"\"\" 以 uuid 生成一个唯一 id 来作为", "import rdflib from pyfuseki import config import uuid name_to_uri = dict() class NameSpace(rdflib.Namespace):", "包装成 URIRef :return: \"\"\" if name not in name_to_uri: name_to_uri[name] = str(uuid.uuid1()) uri", ":return: \"\"\" uri = str(self) if uri.endswith('/'): uri = uri[:uri.rfind('/')] return rdflib.URIRef(uri) def", "rdflib.URIRef: return super(NameSpace, 
self).__getitem__(key) def __getattr__(self, name) -> rdflib.URIRef: return super(NameSpace, self).__getattr__(name) def", "super(NameSpace, self).__getitem__(key) def __getattr__(self, name) -> rdflib.URIRef: return super(NameSpace, self).__getattr__(name) def uid(self, name)", "rdflib.URIRef: return super(NameSpace, self).__getattr__(name) def uid(self, name) -> rdflib.URIRef: \"\"\" 以 uuid 生成一个唯一", "@File: rf_prefix.py \"\"\" import rdflib from pyfuseki import config import uuid name_to_uri =", "@Author: @File: rf_prefix.py \"\"\" import rdflib from pyfuseki import config import uuid name_to_uri", "None): if local_prefix is None: local_prefix = config.COMMON_PREFIX attrs = cls.__annotations__.keys() for k", "self).__getitem__(key) def __getattr__(self, name) -> rdflib.URIRef: return super(NameSpace, self).__getattr__(name) def uid(self, name) ->", "-> rdflib.URIRef: \"\"\" 将自身转换成 URIRef :return: \"\"\" uri = str(self) if uri.endswith('/'): uri", "def __getitem__(self, key) -> rdflib.URIRef: return super(NameSpace, self).__getitem__(key) def __getattr__(self, name) -> rdflib.URIRef:", "str(self) if uri.endswith('/'): uri = uri[:uri.rfind('/')] return rdflib.URIRef(uri) def rdf_prefix(cls: type, local_prefix: str", "rdflib.URIRef(uri) def rdf_prefix(cls: type, local_prefix: str = None): if local_prefix is None: local_prefix", "= a.to_uri() @rdf_prefix class Node: name: str email: str n = Node() print(n.name['yubin'])", "value 包装成 URIRef :return: \"\"\" if name not in name_to_uri: name_to_uri[name] = str(uuid.uuid1())", "k + '/')) return cls if __name__ == '__main__': a = NameSpace('http://www.google.com/person/') b", "rdflib.URIRef(self[uri]) def to_uri(self) -> rdflib.URIRef: \"\"\" 将自身转换成 URIRef :return: \"\"\" uri = str(self)", "-> rdflib.URIRef: \"\"\" 以 uuid 生成一个唯一 id 来作为 value 包装成 URIRef :return: \"\"\"", "\"\"\" 将自身转换成 URIRef :return: \"\"\" uri = str(self) if uri.endswith('/'): uri = uri[:uri.rfind('/')]", "if uri.endswith('/'): uri = uri[:uri.rfind('/')] 
return rdflib.URIRef(uri) def rdf_prefix(cls: type, local_prefix: str =", "def rdf_prefix(cls: type, local_prefix: str = None): if local_prefix is None: local_prefix =", "继承 rdflib 的 Namespace 并扩充其他相关的功能 \"\"\" def __getitem__(self, key) -> rdflib.URIRef: return super(NameSpace,", "str = None): if local_prefix is None: local_prefix = config.COMMON_PREFIX attrs = cls.__annotations__.keys()", "__getitem__(self, key) -> rdflib.URIRef: return super(NameSpace, self).__getitem__(key) def __getattr__(self, name) -> rdflib.URIRef: return", "rdf_prefix(cls: type, local_prefix: str = None): if local_prefix is None: local_prefix = config.COMMON_PREFIX", "to_uri(self) -> rdflib.URIRef: \"\"\" 将自身转换成 URIRef :return: \"\"\" uri = str(self) if uri.endswith('/'):", "name) -> rdflib.URIRef: return super(NameSpace, self).__getattr__(name) def uid(self, name) -> rdflib.URIRef: \"\"\" 以", "name_to_uri = dict() class NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib 的 Namespace 并扩充其他相关的功能 \"\"\" def", "name_to_uri[name] return rdflib.URIRef(self[uri]) def to_uri(self) -> rdflib.URIRef: \"\"\" 将自身转换成 URIRef :return: \"\"\" uri", "return rdflib.URIRef(self[uri]) def to_uri(self) -> rdflib.URIRef: \"\"\" 将自身转换成 URIRef :return: \"\"\" uri =", "\"\"\" 以 uuid 生成一个唯一 id 来作为 value 包装成 URIRef :return: \"\"\" if name", "= None): if local_prefix is None: local_prefix = config.COMMON_PREFIX attrs = cls.__annotations__.keys() for", "<reponame>yubinCloud/pyfuseki \"\"\" @Time: 2021/9/18 13:04 @Author: @File: rf_prefix.py \"\"\" import rdflib from pyfuseki", "if local_prefix is None: local_prefix = config.COMMON_PREFIX attrs = cls.__annotations__.keys() for k in", "dict() class NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib 的 Namespace 并扩充其他相关的功能 \"\"\" def __getitem__(self, key)", "in name_to_uri: name_to_uri[name] = str(uuid.uuid1()) uri = name_to_uri[name] return rdflib.URIRef(self[uri]) def to_uri(self) ->", "uri = name_to_uri[name] return rdflib.URIRef(self[uri]) def to_uri(self) -> rdflib.URIRef: 
\"\"\" 将自身转换成 URIRef :return:", "rdflib 的 Namespace 并扩充其他相关的功能 \"\"\" def __getitem__(self, key) -> rdflib.URIRef: return super(NameSpace, self).__getitem__(key)", "uuid name_to_uri = dict() class NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib 的 Namespace 并扩充其他相关的功能 \"\"\"", "setattr(cls, k, NameSpace(local_prefix + k + '/')) return cls if __name__ == '__main__':", "= config.COMMON_PREFIX attrs = cls.__annotations__.keys() for k in attrs: setattr(cls, k, NameSpace(local_prefix +", "\"\"\" if name not in name_to_uri: name_to_uri[name] = str(uuid.uuid1()) uri = name_to_uri[name] return", "k in attrs: setattr(cls, k, NameSpace(local_prefix + k + '/')) return cls if", "name not in name_to_uri: name_to_uri[name] = str(uuid.uuid1()) uri = name_to_uri[name] return rdflib.URIRef(self[uri]) def", "= str(self) if uri.endswith('/'): uri = uri[:uri.rfind('/')] return rdflib.URIRef(uri) def rdf_prefix(cls: type, local_prefix:", "URIRef :return: \"\"\" uri = str(self) if uri.endswith('/'): uri = uri[:uri.rfind('/')] return rdflib.URIRef(uri)", "str(uuid.uuid1()) uri = name_to_uri[name] return rdflib.URIRef(self[uri]) def to_uri(self) -> rdflib.URIRef: \"\"\" 将自身转换成 URIRef", "-> rdflib.URIRef: return super(NameSpace, self).__getattr__(name) def uid(self, name) -> rdflib.URIRef: \"\"\" 以 uuid", "attrs = cls.__annotations__.keys() for k in attrs: setattr(cls, k, NameSpace(local_prefix + k +", "+ '/')) return cls if __name__ == '__main__': a = NameSpace('http://www.google.com/person/') b =", "key) -> rdflib.URIRef: return super(NameSpace, self).__getitem__(key) def __getattr__(self, name) -> rdflib.URIRef: return super(NameSpace,", "= name_to_uri[name] return rdflib.URIRef(self[uri]) def to_uri(self) -> rdflib.URIRef: \"\"\" 将自身转换成 URIRef :return: \"\"\"", "生成一个唯一 id 来作为 value 包装成 URIRef :return: \"\"\" if name not in name_to_uri:", "in attrs: setattr(cls, k, NameSpace(local_prefix + k + '/')) return cls if __name__", "config import uuid name_to_uri = dict() class 
NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib 的 Namespace", "\"\"\" @Time: 2021/9/18 13:04 @Author: @File: rf_prefix.py \"\"\" import rdflib from pyfuseki import", "__name__ == '__main__': a = NameSpace('http://www.google.com/person/') b = a.to_uri() @rdf_prefix class Node: name:", "local_prefix = config.COMMON_PREFIX attrs = cls.__annotations__.keys() for k in attrs: setattr(cls, k, NameSpace(local_prefix", "type, local_prefix: str = None): if local_prefix is None: local_prefix = config.COMMON_PREFIX attrs", "rf_prefix.py \"\"\" import rdflib from pyfuseki import config import uuid name_to_uri = dict()", "a = NameSpace('http://www.google.com/person/') b = a.to_uri() @rdf_prefix class Node: name: str email: str", "import uuid name_to_uri = dict() class NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib 的 Namespace 并扩充其他相关的功能", ":return: \"\"\" if name not in name_to_uri: name_to_uri[name] = str(uuid.uuid1()) uri = name_to_uri[name]", "NameSpace(rdflib.Namespace): \"\"\" 继承 rdflib 的 Namespace 并扩充其他相关的功能 \"\"\" def __getitem__(self, key) -> rdflib.URIRef:", "\"\"\" import rdflib from pyfuseki import config import uuid name_to_uri = dict() class", "rdflib.URIRef: \"\"\" 将自身转换成 URIRef :return: \"\"\" uri = str(self) if uri.endswith('/'): uri =", "self).__getattr__(name) def uid(self, name) -> rdflib.URIRef: \"\"\" 以 uuid 生成一个唯一 id 来作为 value", "config.COMMON_PREFIX attrs = cls.__annotations__.keys() for k in attrs: setattr(cls, k, NameSpace(local_prefix + k", "b = a.to_uri() @rdf_prefix class Node: name: str email: str n = Node()", "rdflib from pyfuseki import config import uuid name_to_uri = dict() class NameSpace(rdflib.Namespace): \"\"\"", "uuid 生成一个唯一 id 来作为 value 包装成 URIRef :return: \"\"\" if name not in", "for k in attrs: setattr(cls, k, NameSpace(local_prefix + k + '/')) return cls", "== '__main__': a = NameSpace('http://www.google.com/person/') b = a.to_uri() @rdf_prefix class Node: name: str", "将自身转换成 URIRef :return: \"\"\" uri = str(self) if uri.endswith('/'): uri 
= uri[:uri.rfind('/')] return", "return rdflib.URIRef(uri) def rdf_prefix(cls: type, local_prefix: str = None): if local_prefix is None:", "\"\"\" 继承 rdflib 的 Namespace 并扩充其他相关的功能 \"\"\" def __getitem__(self, key) -> rdflib.URIRef: return", "def uid(self, name) -> rdflib.URIRef: \"\"\" 以 uuid 生成一个唯一 id 来作为 value 包装成", "Namespace 并扩充其他相关的功能 \"\"\" def __getitem__(self, key) -> rdflib.URIRef: return super(NameSpace, self).__getitem__(key) def __getattr__(self,", "2021/9/18 13:04 @Author: @File: rf_prefix.py \"\"\" import rdflib from pyfuseki import config import", "'/')) return cls if __name__ == '__main__': a = NameSpace('http://www.google.com/person/') b = a.to_uri()", "= str(uuid.uuid1()) uri = name_to_uri[name] return rdflib.URIRef(self[uri]) def to_uri(self) -> rdflib.URIRef: \"\"\" 将自身转换成" ]
[ "de cada vez, para cada valor digitado pelo usuário. O programa será interrompido", "o programa...\\033[m') sleep(1) break else: for c in range (0,11): print(f'{n} x {c}", "0 cont = 0 while n >= 0: print('--' * 15) print('\\033[33mPara cancelar,", "saber a tabuada ? ')) print('--' * 15) if n < 0: print('\\033[31mFinalizando", "0: print('--' * 15) print('\\033[33mPara cancelar, digite um número negativo.\\033[m') n = int(input('Qual", "'''faça um programa que mostre a tabuada de vários números, um de cada", "int(input('Qual número deseja saber a tabuada ? ')) print('--' * 15) if n", "cont = 0 while n >= 0: print('--' * 15) print('\\033[33mPara cancelar, digite", ">= 0: print('--' * 15) print('\\033[33mPara cancelar, digite um número negativo.\\033[m') n =", "cancelar, digite um número negativo.\\033[m') n = int(input('Qual número deseja saber a tabuada", "= 0 cont = 0 while n >= 0: print('--' * 15) print('\\033[33mPara", "tabuada ? ')) print('--' * 15) if n < 0: print('\\033[31mFinalizando o programa...\\033[m')", "sleep(1) break else: for c in range (0,11): print(f'{n} x {c} = {n*c}')", "programa...\\033[m') sleep(1) break else: for c in range (0,11): print(f'{n} x {c} =", "um programa que mostre a tabuada de vários números, um de cada vez,", "= int(input('Qual número deseja saber a tabuada ? ')) print('--' * 15) if", "< 0: print('\\033[31mFinalizando o programa...\\033[m') sleep(1) break else: for c in range (0,11):", "um de cada vez, para cada valor digitado pelo usuário. O programa será", "cada vez, para cada valor digitado pelo usuário. O programa será interrompido quando", "será interrompido quando o valor solicitado for negativo.''' from time import sleep n", "solicitado for negativo.''' from time import sleep n = 0 cont = 0", "O programa será interrompido quando o valor solicitado for negativo.''' from time import", "digitado pelo usuário. 
O programa será interrompido quando o valor solicitado for negativo.'''", "15) print('\\033[33mPara cancelar, digite um número negativo.\\033[m') n = int(input('Qual número deseja saber", "programa será interrompido quando o valor solicitado for negativo.''' from time import sleep", "cada valor digitado pelo usuário. O programa será interrompido quando o valor solicitado", "programa que mostre a tabuada de vários números, um de cada vez, para", "if n < 0: print('\\033[31mFinalizando o programa...\\033[m') sleep(1) break else: for c in", "n >= 0: print('--' * 15) print('\\033[33mPara cancelar, digite um número negativo.\\033[m') n", "n = int(input('Qual número deseja saber a tabuada ? ')) print('--' * 15)", "o valor solicitado for negativo.''' from time import sleep n = 0 cont", "n = 0 cont = 0 while n >= 0: print('--' * 15)", "* 15) print('\\033[33mPara cancelar, digite um número negativo.\\033[m') n = int(input('Qual número deseja", "negativo.\\033[m') n = int(input('Qual número deseja saber a tabuada ? ')) print('--' *", "? ')) print('--' * 15) if n < 0: print('\\033[31mFinalizando o programa...\\033[m') sleep(1)", "negativo.''' from time import sleep n = 0 cont = 0 while n", "usuário. O programa será interrompido quando o valor solicitado for negativo.''' from time", "quando o valor solicitado for negativo.''' from time import sleep n = 0", "para cada valor digitado pelo usuário. O programa será interrompido quando o valor", "valor solicitado for negativo.''' from time import sleep n = 0 cont =", "interrompido quando o valor solicitado for negativo.''' from time import sleep n =", "digite um número negativo.\\033[m') n = int(input('Qual número deseja saber a tabuada ?", "número negativo.\\033[m') n = int(input('Qual número deseja saber a tabuada ? ')) print('--'", "')) print('--' * 15) if n < 0: print('\\033[31mFinalizando o programa...\\033[m') sleep(1) break", "deseja saber a tabuada ? 
')) print('--' * 15) if n < 0:", "print('\\033[31mFinalizando o programa...\\033[m') sleep(1) break else: for c in range (0,11): print(f'{n} x", "vez, para cada valor digitado pelo usuário. O programa será interrompido quando o", "n < 0: print('\\033[31mFinalizando o programa...\\033[m') sleep(1) break else: for c in range", "tabuada de vários números, um de cada vez, para cada valor digitado pelo", "15) if n < 0: print('\\033[31mFinalizando o programa...\\033[m') sleep(1) break else: for c", "vários números, um de cada vez, para cada valor digitado pelo usuário. O", "sleep n = 0 cont = 0 while n >= 0: print('--' *", "* 15) if n < 0: print('\\033[31mFinalizando o programa...\\033[m') sleep(1) break else: for", "import sleep n = 0 cont = 0 while n >= 0: print('--'", "print('\\033[33mPara cancelar, digite um número negativo.\\033[m') n = int(input('Qual número deseja saber a", "time import sleep n = 0 cont = 0 while n >= 0:", "números, um de cada vez, para cada valor digitado pelo usuário. O programa", "= 0 while n >= 0: print('--' * 15) print('\\033[33mPara cancelar, digite um", "a tabuada ? ')) print('--' * 15) if n < 0: print('\\033[31mFinalizando o", "pelo usuário. O programa será interrompido quando o valor solicitado for negativo.''' from", "for negativo.''' from time import sleep n = 0 cont = 0 while", "print('--' * 15) print('\\033[33mPara cancelar, digite um número negativo.\\033[m') n = int(input('Qual número", "que mostre a tabuada de vários números, um de cada vez, para cada", "mostre a tabuada de vários números, um de cada vez, para cada valor", "from time import sleep n = 0 cont = 0 while n >=", "valor digitado pelo usuário. O programa será interrompido quando o valor solicitado for", "a tabuada de vários números, um de cada vez, para cada valor digitado", "while n >= 0: print('--' * 15) print('\\033[33mPara cancelar, digite um número negativo.\\033[m')", "um número negativo.\\033[m') n = int(input('Qual número deseja saber a tabuada ? 
'))", "número deseja saber a tabuada ? ')) print('--' * 15) if n <", "de vários números, um de cada vez, para cada valor digitado pelo usuário.", "print('--' * 15) if n < 0: print('\\033[31mFinalizando o programa...\\033[m') sleep(1) break else:", "0: print('\\033[31mFinalizando o programa...\\033[m') sleep(1) break else: for c in range (0,11): print(f'{n}", "0 while n >= 0: print('--' * 15) print('\\033[33mPara cancelar, digite um número" ]
[ "Reset the config file.\"\"\" with open(get_config_file_path(), 'w') as datafile: json.dump({}, datafile) def build_command(name,", "datafile) def build_command(name, url): \"\"\" Build a click command according the arguments. :param", "will be opened. :rtype: click.Command \"\"\" return click.Command( name, callback=lambda: open_url(url), help='Open {}'.format(url)", "mode the file with be opened with. Default: r :return: the file object.", "json.dump({}, datafile) def build_command(name, url): \"\"\" Build a click command according the arguments.", "url that will be opened. :rtype: click.Command \"\"\" return click.Command( name, callback=lambda: open_url(url),", "url: the url that will be opened. :rtype: click.Command \"\"\" return click.Command( name,", "return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'): \"\"\" Return the file storing the commands. :param", "file with be opened with. Default: r :return: the file object. :rtype: file", "as datafile: yield datafile def generate_empty_config_file(): \"\"\" Reset the config file.\"\"\" with open(get_config_file_path(),", "crayons def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url) def get_config_file_path(): home = os.path.expanduser(\"~\") return", "file \"\"\" path = get_config_file_path() if not os.path.exists(path): generate_empty_config_file() with open(path, mode) as", "url): \"\"\" Build a click command according the arguments. :param str name: label", "commands. :param str mode: the mode the file with be opened with. Default:", "trigger the command. :param str url: the url that will be opened. :rtype:", "config file.\"\"\" with open(get_config_file_path(), 'w') as datafile: json.dump({}, datafile) def build_command(name, url): \"\"\"", "def get_config_file(mode='r'): \"\"\" Return the file storing the commands. :param str mode: the", "str mode: the mode the file with be opened with. 
Default: r :return:", "-*- coding: utf-8 -*- import json import os from contextlib import contextmanager import", "def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url) def get_config_file_path(): home = os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home))", "contextlib import contextmanager import click import crayons def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url)", "bold=True))) click.launch(url) def get_config_file_path(): home = os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'): \"\"\"", "Return the file storing the commands. :param str mode: the mode the file", "click.launch(url) def get_config_file_path(): home = os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'): \"\"\" Return", "build_command(name, url): \"\"\" Build a click command according the arguments. :param str name:", "import crayons def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url) def get_config_file_path(): home = os.path.expanduser(\"~\")", "os.path.exists(path): generate_empty_config_file() with open(path, mode) as datafile: yield datafile def generate_empty_config_file(): \"\"\" Reset", "utf-8 -*- import json import os from contextlib import contextmanager import click import", "name: label that the user will use to trigger the command. :param str", "command. :param str url: the url that will be opened. :rtype: click.Command \"\"\"", "with. Default: r :return: the file object. :rtype: file \"\"\" path = get_config_file_path()", "-*- import json import os from contextlib import contextmanager import click import crayons", "a click command according the arguments. 
:param str name: label that the user", "str name: label that the user will use to trigger the command. :param", "with open(get_config_file_path(), 'w') as datafile: json.dump({}, datafile) def build_command(name, url): \"\"\" Build a", "the arguments. :param str name: label that the user will use to trigger", ":param str url: the url that will be opened. :rtype: click.Command \"\"\" return", "def build_command(name, url): \"\"\" Build a click command according the arguments. :param str", "with be opened with. Default: r :return: the file object. :rtype: file \"\"\"", "python # -*- coding: utf-8 -*- import json import os from contextlib import", "be opened. :rtype: click.Command \"\"\" return click.Command( name, callback=lambda: open_url(url), help='Open {}'.format(url) )", "the file storing the commands. :param str mode: the mode the file with", "user will use to trigger the command. :param str url: the url that", "contextmanager import click import crayons def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url) def get_config_file_path():", "import os from contextlib import contextmanager import click import crayons def open_url(url): click.echo(\"Opening", "be opened with. Default: r :return: the file object. :rtype: file \"\"\" path", "not os.path.exists(path): generate_empty_config_file() with open(path, mode) as datafile: yield datafile def generate_empty_config_file(): \"\"\"", "os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'): \"\"\" Return the file storing the commands. :param str", "with open(path, mode) as datafile: yield datafile def generate_empty_config_file(): \"\"\" Reset the config", "arguments. :param str name: label that the user will use to trigger the", "label that the user will use to trigger the command. 
:param str url:", "os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'): \"\"\" Return the file storing the commands.", "click import crayons def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url) def get_config_file_path(): home =", "generate_empty_config_file(): \"\"\" Reset the config file.\"\"\" with open(get_config_file_path(), 'w') as datafile: json.dump({}, datafile)", "mode: the mode the file with be opened with. Default: r :return: the", "@contextmanager def get_config_file(mode='r'): \"\"\" Return the file storing the commands. :param str mode:", "use to trigger the command. :param str url: the url that will be", "the commands. :param str mode: the mode the file with be opened with.", ":rtype: file \"\"\" path = get_config_file_path() if not os.path.exists(path): generate_empty_config_file() with open(path, mode)", "open(path, mode) as datafile: yield datafile def generate_empty_config_file(): \"\"\" Reset the config file.\"\"\"", "= os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'): \"\"\" Return the file storing the", "object. :rtype: file \"\"\" path = get_config_file_path() if not os.path.exists(path): generate_empty_config_file() with open(path,", "the url that will be opened. 
:rtype: click.Command \"\"\" return click.Command( name, callback=lambda:", "get_config_file_path(): home = os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'): \"\"\" Return the file", "import json import os from contextlib import contextmanager import click import crayons def", "yield datafile def generate_empty_config_file(): \"\"\" Reset the config file.\"\"\" with open(get_config_file_path(), 'w') as", "the config file.\"\"\" with open(get_config_file_path(), 'w') as datafile: json.dump({}, datafile) def build_command(name, url):", "\"\"\" Reset the config file.\"\"\" with open(get_config_file_path(), 'w') as datafile: json.dump({}, datafile) def", "{}.\".format(crayons.white(url, bold=True))) click.launch(url) def get_config_file_path(): home = os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'):", "generate_empty_config_file() with open(path, mode) as datafile: yield datafile def generate_empty_config_file(): \"\"\" Reset the", "json import os from contextlib import contextmanager import click import crayons def open_url(url):", "\"\"\" Return the file storing the commands. :param str mode: the mode the", "Default: r :return: the file object. :rtype: file \"\"\" path = get_config_file_path() if", "the file object. 
:rtype: file \"\"\" path = get_config_file_path() if not os.path.exists(path): generate_empty_config_file()", "os from contextlib import contextmanager import click import crayons def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url,", "def get_config_file_path(): home = os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'): \"\"\" Return the", "from contextlib import contextmanager import click import crayons def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True)))", "home = os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def get_config_file(mode='r'): \"\"\" Return the file storing", "def generate_empty_config_file(): \"\"\" Reset the config file.\"\"\" with open(get_config_file_path(), 'w') as datafile: json.dump({},", "datafile: yield datafile def generate_empty_config_file(): \"\"\" Reset the config file.\"\"\" with open(get_config_file_path(), 'w')", ":param str mode: the mode the file with be opened with. Default: r", "click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url) def get_config_file_path(): home = os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager def", "the user will use to trigger the command. :param str url: the url", "the mode the file with be opened with. Default: r :return: the file", "import contextmanager import click import crayons def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url) def", "get_config_file(mode='r'): \"\"\" Return the file storing the commands. 
:param str mode: the mode", "get_config_file_path() if not os.path.exists(path): generate_empty_config_file() with open(path, mode) as datafile: yield datafile def", "path = get_config_file_path() if not os.path.exists(path): generate_empty_config_file() with open(path, mode) as datafile: yield", "open(get_config_file_path(), 'w') as datafile: json.dump({}, datafile) def build_command(name, url): \"\"\" Build a click", "click command according the arguments. :param str name: label that the user will", "the file with be opened with. Default: r :return: the file object. :rtype:", "datafile: json.dump({}, datafile) def build_command(name, url): \"\"\" Build a click command according the", ":param str name: label that the user will use to trigger the command.", "command according the arguments. :param str name: label that the user will use", "# -*- coding: utf-8 -*- import json import os from contextlib import contextmanager", "Build a click command according the arguments. :param str name: label that the", "that the user will use to trigger the command. :param str url: the", "str url: the url that will be opened. :rtype: click.Command \"\"\" return click.Command(", "r :return: the file object. :rtype: file \"\"\" path = get_config_file_path() if not", "according the arguments. :param str name: label that the user will use to", "file.\"\"\" with open(get_config_file_path(), 'w') as datafile: json.dump({}, datafile) def build_command(name, url): \"\"\" Build", "import click import crayons def open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url) def get_config_file_path(): home", "\"\"\" Build a click command according the arguments. 
:param str name: label that", "#!/usr/bin/env python # -*- coding: utf-8 -*- import json import os from contextlib", "\"\"\" path = get_config_file_path() if not os.path.exists(path): generate_empty_config_file() with open(path, mode) as datafile:", "datafile def generate_empty_config_file(): \"\"\" Reset the config file.\"\"\" with open(get_config_file_path(), 'w') as datafile:", "opened with. Default: r :return: the file object. :rtype: file \"\"\" path =", "the command. :param str url: the url that will be opened. :rtype: click.Command", "open_url(url): click.echo(\"Opening {}.\".format(crayons.white(url, bold=True))) click.launch(url) def get_config_file_path(): home = os.path.expanduser(\"~\") return os.path.realpath('{}/.commands.json'.format(home)) @contextmanager", "coding: utf-8 -*- import json import os from contextlib import contextmanager import click", "mode) as datafile: yield datafile def generate_empty_config_file(): \"\"\" Reset the config file.\"\"\" with", "file object. :rtype: file \"\"\" path = get_config_file_path() if not os.path.exists(path): generate_empty_config_file() with", ":return: the file object. :rtype: file \"\"\" path = get_config_file_path() if not os.path.exists(path):", "that will be opened. :rtype: click.Command \"\"\" return click.Command( name, callback=lambda: open_url(url), help='Open", "if not os.path.exists(path): generate_empty_config_file() with open(path, mode) as datafile: yield datafile def generate_empty_config_file():", "to trigger the command. :param str url: the url that will be opened.", "file storing the commands. :param str mode: the mode the file with be", "as datafile: json.dump({}, datafile) def build_command(name, url): \"\"\" Build a click command according", "'w') as datafile: json.dump({}, datafile) def build_command(name, url): \"\"\" Build a click command", "will use to trigger the command. :param str url: the url that will", "storing the commands. 
:param str mode: the mode the file with be opened", "= get_config_file_path() if not os.path.exists(path): generate_empty_config_file() with open(path, mode) as datafile: yield datafile" ]
[ "query: LessThanQuery) -> T: raise NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery) -> T:", "query: SumQuery) -> T: raise NotImplementedError() def AND(self: \"QueryCompiler\", query: AndQuery) -> T:", "AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery, SumQuery, ) class", "GreaterThanQuery, LessEqualQuery, LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery, SumQuery, ) class QueryCompiler(Generic[T]): def EQUALS(self:", "class QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\", query: EqualsQuery) -> T: raise NotImplementedError() def NOT_EQUALS(self:", "def compile(self: \"QueryCompiler\", query: Union[Any, Query]) -> T: if not isinstance(query, Query): return", "AndGroupQuery) -> T: raise NotImplementedError() def OR(self: \"QueryCompiler\", query: OrQuery) -> T: raise", "NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery) -> T: raise NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\", query: GreaterThanQuery)", "NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery) -> T: raise NotImplementedError() def LESS_THAN(self: \"QueryCompiler\",", "def ROW_COUNT(self: \"QueryCompiler\", query: RowCountQuery) -> T: raise NotImplementedError() def SUM(self: \"QueryCompiler\", query:", "def AND(self: \"QueryCompiler\", query: AndQuery) -> T: raise NotImplementedError() def AND_GROUP(self: \"QueryCompiler\", query:", "def OR(self: \"QueryCompiler\", query: OrQuery) -> T: raise NotImplementedError() def OR_GROUP(self: \"QueryCompiler\", query:", "NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery, SumQuery, ) class QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\", query: EqualsQuery)", "T: raise NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery) -> T: raise NotImplementedError() def", "\"QueryCompiler\", query: 
NotEqualsQuery) -> T: raise NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\", query: GreaterThanQuery) ->", "NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery) -> T: raise NotImplementedError() def ROW_COUNT(self: \"QueryCompiler\",", "from tanuki.data_store.query import ( AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, LessThanQuery, NotEqualsQuery, OrGroupQuery,", "NotImplementedError() def AND_GROUP(self: \"QueryCompiler\", query: AndGroupQuery) -> T: raise NotImplementedError() def OR(self: \"QueryCompiler\",", "raise NotImplementedError() def ROW_COUNT(self: \"QueryCompiler\", query: RowCountQuery) -> T: raise NotImplementedError() def SUM(self:", "from tanuki.data_store.query import Query if TYPE_CHECKING: from tanuki.data_store.query import ( AndGroupQuery, AndQuery, EqualsQuery,", "T: raise NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery) -> T: raise NotImplementedError() def", "LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery) -> T: raise NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery)", "SumQuery) -> T: raise NotImplementedError() def AND(self: \"QueryCompiler\", query: AndQuery) -> T: raise", "OrGroupQuery) -> T: raise NotImplementedError() def compile(self: \"QueryCompiler\", query: Union[Any, Query]) -> T:", "OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery) -> T: raise NotImplementedError() def compile(self: \"QueryCompiler\", query: Union[Any,", "Union T = TypeVar(\"T\") from tanuki.data_store.query import Query if TYPE_CHECKING: from tanuki.data_store.query import", "raise NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery) -> T: raise NotImplementedError() def LESS_THAN(self:", "TYPE_CHECKING: from tanuki.data_store.query import ( AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, LessThanQuery, 
NotEqualsQuery,", "query: LessEqualQuery) -> T: raise NotImplementedError() def ROW_COUNT(self: \"QueryCompiler\", query: RowCountQuery) -> T:", "query: RowCountQuery) -> T: raise NotImplementedError() def SUM(self: \"QueryCompiler\", query: SumQuery) -> T:", "query: AndGroupQuery) -> T: raise NotImplementedError() def OR(self: \"QueryCompiler\", query: OrQuery) -> T:", "T: raise NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery) -> T: raise NotImplementedError() def", "T: raise NotImplementedError() def AND(self: \"QueryCompiler\", query: AndQuery) -> T: raise NotImplementedError() def", "query: EqualsQuery) -> T: raise NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery) -> T:", "typing import Any, Generic, TYPE_CHECKING, TypeVar, Union T = TypeVar(\"T\") from tanuki.data_store.query import", ") class QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\", query: EqualsQuery) -> T: raise NotImplementedError() def", "def LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery) -> T: raise NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\", query:", "raise NotImplementedError() def OR(self: \"QueryCompiler\", query: OrQuery) -> T: raise NotImplementedError() def OR_GROUP(self:", "EqualsQuery) -> T: raise NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery) -> T: raise", "LessEqualQuery) -> T: raise NotImplementedError() def ROW_COUNT(self: \"QueryCompiler\", query: RowCountQuery) -> T: raise", "EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery, SumQuery, ) class QueryCompiler(Generic[T]):", "OrQuery) -> T: raise NotImplementedError() def OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery) -> T: raise", "from __future__ import annotations from typing import Any, Generic, TYPE_CHECKING, TypeVar, Union T", "GREATER_THAN(self: \"QueryCompiler\", query: 
GreaterThanQuery) -> T: raise NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery)", "query: NotEqualsQuery) -> T: raise NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\", query: GreaterThanQuery) -> T:", "AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery, SumQuery, )", "T: raise NotImplementedError() def LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery) -> T: raise NotImplementedError() def", "NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery) -> T: raise NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\",", "T = TypeVar(\"T\") from tanuki.data_store.query import Query if TYPE_CHECKING: from tanuki.data_store.query import (", "def GREATER_THAN(self: \"QueryCompiler\", query: GreaterThanQuery) -> T: raise NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\", query:", "def OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery) -> T: raise NotImplementedError() def compile(self: \"QueryCompiler\", query:", "tanuki.data_store.query import ( AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery,", "\"QueryCompiler\", query: SumQuery) -> T: raise NotImplementedError() def AND(self: \"QueryCompiler\", query: AndQuery) ->", "raise NotImplementedError() def AND(self: \"QueryCompiler\", query: AndQuery) -> T: raise NotImplementedError() def AND_GROUP(self:", "( AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery, SumQuery,", "-> T: raise NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery) -> T: raise NotImplementedError()", "import ( AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, 
LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery,", "if TYPE_CHECKING: from tanuki.data_store.query import ( AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, LessThanQuery,", "T: raise NotImplementedError() def AND_GROUP(self: \"QueryCompiler\", query: AndGroupQuery) -> T: raise NotImplementedError() def", "T: raise NotImplementedError() def SUM(self: \"QueryCompiler\", query: SumQuery) -> T: raise NotImplementedError() def", "-> T: raise NotImplementedError() def OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery) -> T: raise NotImplementedError()", "GreaterEqualQuery) -> T: raise NotImplementedError() def LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery) -> T: raise", "AND_GROUP(self: \"QueryCompiler\", query: AndGroupQuery) -> T: raise NotImplementedError() def OR(self: \"QueryCompiler\", query: OrQuery)", "query: OrQuery) -> T: raise NotImplementedError() def OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery) -> T:", "query: GreaterThanQuery) -> T: raise NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery) -> T:", "query: Union[Any, Query]) -> T: if not isinstance(query, Query): return query return query.compile(self)", "T: raise NotImplementedError() def compile(self: \"QueryCompiler\", query: Union[Any, Query]) -> T: if not", "AndQuery) -> T: raise NotImplementedError() def AND_GROUP(self: \"QueryCompiler\", query: AndGroupQuery) -> T: raise", "\"QueryCompiler\", query: LessEqualQuery) -> T: raise NotImplementedError() def ROW_COUNT(self: \"QueryCompiler\", query: RowCountQuery) ->", "\"QueryCompiler\", query: OrGroupQuery) -> T: raise NotImplementedError() def compile(self: \"QueryCompiler\", query: Union[Any, Query])", "OrQuery, RowCountQuery, SumQuery, ) class QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\", query: EqualsQuery) -> T:", "T: raise NotImplementedError() def ROW_COUNT(self: \"QueryCompiler\", query: RowCountQuery) -> 
T: raise NotImplementedError() def", "GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery) -> T: raise NotImplementedError() def LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery)", "OR(self: \"QueryCompiler\", query: OrQuery) -> T: raise NotImplementedError() def OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery)", "NotImplementedError() def LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery) -> T: raise NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\",", "\"QueryCompiler\", query: Union[Any, Query]) -> T: if not isinstance(query, Query): return query return", "RowCountQuery) -> T: raise NotImplementedError() def SUM(self: \"QueryCompiler\", query: SumQuery) -> T: raise", "TYPE_CHECKING, TypeVar, Union T = TypeVar(\"T\") from tanuki.data_store.query import Query if TYPE_CHECKING: from", "def NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery) -> T: raise NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\", query:", "import Query if TYPE_CHECKING: from tanuki.data_store.query import ( AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery,", "\"QueryCompiler\", query: GreaterEqualQuery) -> T: raise NotImplementedError() def LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery) ->", "def GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery) -> T: raise NotImplementedError() def LESS_THAN(self: \"QueryCompiler\", query:", "OrGroupQuery, OrQuery, RowCountQuery, SumQuery, ) class QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\", query: EqualsQuery) ->", "from typing import Any, Generic, TYPE_CHECKING, TypeVar, Union T = TypeVar(\"T\") from tanuki.data_store.query", "\"QueryCompiler\", query: LessThanQuery) -> T: raise NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery) ->", "compile(self: \"QueryCompiler\", query: Union[Any, Query]) -> T: if not isinstance(query, Query): return query", "import Any, Generic, TYPE_CHECKING, TypeVar, 
Union T = TypeVar(\"T\") from tanuki.data_store.query import Query", "raise NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery) -> T: raise NotImplementedError() def ROW_COUNT(self:", "-> T: raise NotImplementedError() def compile(self: \"QueryCompiler\", query: Union[Any, Query]) -> T: if", "query: AndQuery) -> T: raise NotImplementedError() def AND_GROUP(self: \"QueryCompiler\", query: AndGroupQuery) -> T:", "NotImplementedError() def compile(self: \"QueryCompiler\", query: Union[Any, Query]) -> T: if not isinstance(query, Query):", "raise NotImplementedError() def OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery) -> T: raise NotImplementedError() def compile(self:", "NotImplementedError() def AND(self: \"QueryCompiler\", query: AndQuery) -> T: raise NotImplementedError() def AND_GROUP(self: \"QueryCompiler\",", "annotations from typing import Any, Generic, TYPE_CHECKING, TypeVar, Union T = TypeVar(\"T\") from", "\"QueryCompiler\", query: OrQuery) -> T: raise NotImplementedError() def OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery) ->", "query: OrGroupQuery) -> T: raise NotImplementedError() def compile(self: \"QueryCompiler\", query: Union[Any, Query]) ->", "NotImplementedError() def ROW_COUNT(self: \"QueryCompiler\", query: RowCountQuery) -> T: raise NotImplementedError() def SUM(self: \"QueryCompiler\",", "AND(self: \"QueryCompiler\", query: AndQuery) -> T: raise NotImplementedError() def AND_GROUP(self: \"QueryCompiler\", query: AndGroupQuery)", "-> T: raise NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery) -> T: raise NotImplementedError()", "-> T: raise NotImplementedError() def ROW_COUNT(self: \"QueryCompiler\", query: RowCountQuery) -> T: raise NotImplementedError()", "def AND_GROUP(self: \"QueryCompiler\", query: AndGroupQuery) -> T: raise NotImplementedError() def OR(self: \"QueryCompiler\", query:", "NotImplementedError() def OR(self: \"QueryCompiler\", query: OrQuery) -> 
T: raise NotImplementedError() def OR_GROUP(self: \"QueryCompiler\",", "\"QueryCompiler\", query: GreaterThanQuery) -> T: raise NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery) ->", "TypeVar(\"T\") from tanuki.data_store.query import Query if TYPE_CHECKING: from tanuki.data_store.query import ( AndGroupQuery, AndQuery,", "QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\", query: EqualsQuery) -> T: raise NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\",", "raise NotImplementedError() def LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery) -> T: raise NotImplementedError() def LESS_EQUAL(self:", "\"QueryCompiler\", query: RowCountQuery) -> T: raise NotImplementedError() def SUM(self: \"QueryCompiler\", query: SumQuery) ->", "NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\", query: GreaterThanQuery) -> T: raise NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\",", "def LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery) -> T: raise NotImplementedError() def ROW_COUNT(self: \"QueryCompiler\", query:", "RowCountQuery, SumQuery, ) class QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\", query: EqualsQuery) -> T: raise", "import annotations from typing import Any, Generic, TYPE_CHECKING, TypeVar, Union T = TypeVar(\"T\")", "LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery, SumQuery, ) class QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\", query:", "<gh_stars>0 from __future__ import annotations from typing import Any, Generic, TYPE_CHECKING, TypeVar, Union", "Query if TYPE_CHECKING: from tanuki.data_store.query import ( AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery, GreaterThanQuery, LessEqualQuery,", "-> T: raise NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery) -> T: raise NotImplementedError()", "raise NotImplementedError() def AND_GROUP(self: \"QueryCompiler\", query: 
AndGroupQuery) -> T: raise NotImplementedError() def OR(self:", "-> T: raise NotImplementedError() def LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery) -> T: raise NotImplementedError()", "LessEqualQuery, LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery, SumQuery, ) class QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\",", "GreaterThanQuery) -> T: raise NotImplementedError() def GREATER_EQUAL(self: \"QueryCompiler\", query: GreaterEqualQuery) -> T: raise", "SumQuery, ) class QueryCompiler(Generic[T]): def EQUALS(self: \"QueryCompiler\", query: EqualsQuery) -> T: raise NotImplementedError()", "raise NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\", query: GreaterThanQuery) -> T: raise NotImplementedError() def GREATER_EQUAL(self:", "= TypeVar(\"T\") from tanuki.data_store.query import Query if TYPE_CHECKING: from tanuki.data_store.query import ( AndGroupQuery,", "\"QueryCompiler\", query: AndQuery) -> T: raise NotImplementedError() def AND_GROUP(self: \"QueryCompiler\", query: AndGroupQuery) ->", "EQUALS(self: \"QueryCompiler\", query: EqualsQuery) -> T: raise NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery)", "-> T: raise NotImplementedError() def SUM(self: \"QueryCompiler\", query: SumQuery) -> T: raise NotImplementedError()", "-> T: raise NotImplementedError() def AND_GROUP(self: \"QueryCompiler\", query: AndGroupQuery) -> T: raise NotImplementedError()", "NotImplementedError() def SUM(self: \"QueryCompiler\", query: SumQuery) -> T: raise NotImplementedError() def AND(self: \"QueryCompiler\",", "LessThanQuery) -> T: raise NotImplementedError() def LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery) -> T: raise", "SUM(self: \"QueryCompiler\", query: SumQuery) -> T: raise NotImplementedError() def AND(self: \"QueryCompiler\", query: AndQuery)", "LESS_EQUAL(self: \"QueryCompiler\", query: LessEqualQuery) -> T: raise NotImplementedError() def ROW_COUNT(self: 
\"QueryCompiler\", query: RowCountQuery)", "-> T: raise NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\", query: GreaterThanQuery) -> T: raise NotImplementedError()", "query: GreaterEqualQuery) -> T: raise NotImplementedError() def LESS_THAN(self: \"QueryCompiler\", query: LessThanQuery) -> T:", "def EQUALS(self: \"QueryCompiler\", query: EqualsQuery) -> T: raise NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\", query:", "-> T: raise NotImplementedError() def OR(self: \"QueryCompiler\", query: OrQuery) -> T: raise NotImplementedError()", "T: raise NotImplementedError() def OR(self: \"QueryCompiler\", query: OrQuery) -> T: raise NotImplementedError() def", "raise NotImplementedError() def compile(self: \"QueryCompiler\", query: Union[Any, Query]) -> T: if not isinstance(query,", "tanuki.data_store.query import Query if TYPE_CHECKING: from tanuki.data_store.query import ( AndGroupQuery, AndQuery, EqualsQuery, GreaterEqualQuery,", "NotImplementedError() def OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery) -> T: raise NotImplementedError() def compile(self: \"QueryCompiler\",", "-> T: raise NotImplementedError() def AND(self: \"QueryCompiler\", query: AndQuery) -> T: raise NotImplementedError()", "__future__ import annotations from typing import Any, Generic, TYPE_CHECKING, TypeVar, Union T =", "T: raise NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\", query: GreaterThanQuery) -> T: raise NotImplementedError() def", "Any, Generic, TYPE_CHECKING, TypeVar, Union T = TypeVar(\"T\") from tanuki.data_store.query import Query if", "raise NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery) -> T: raise NotImplementedError() def GREATER_THAN(self:", "def SUM(self: \"QueryCompiler\", query: SumQuery) -> T: raise NotImplementedError() def AND(self: \"QueryCompiler\", query:", "TypeVar, Union T = TypeVar(\"T\") from tanuki.data_store.query import Query if TYPE_CHECKING: from tanuki.data_store.query", 
"NotEqualsQuery) -> T: raise NotImplementedError() def GREATER_THAN(self: \"QueryCompiler\", query: GreaterThanQuery) -> T: raise", "\"QueryCompiler\", query: EqualsQuery) -> T: raise NotImplementedError() def NOT_EQUALS(self: \"QueryCompiler\", query: NotEqualsQuery) ->", "\"QueryCompiler\", query: AndGroupQuery) -> T: raise NotImplementedError() def OR(self: \"QueryCompiler\", query: OrQuery) ->", "Generic, TYPE_CHECKING, TypeVar, Union T = TypeVar(\"T\") from tanuki.data_store.query import Query if TYPE_CHECKING:", "raise NotImplementedError() def SUM(self: \"QueryCompiler\", query: SumQuery) -> T: raise NotImplementedError() def AND(self:", "GreaterEqualQuery, GreaterThanQuery, LessEqualQuery, LessThanQuery, NotEqualsQuery, OrGroupQuery, OrQuery, RowCountQuery, SumQuery, ) class QueryCompiler(Generic[T]): def", "ROW_COUNT(self: \"QueryCompiler\", query: RowCountQuery) -> T: raise NotImplementedError() def SUM(self: \"QueryCompiler\", query: SumQuery)", "T: raise NotImplementedError() def OR_GROUP(self: \"QueryCompiler\", query: OrGroupQuery) -> T: raise NotImplementedError() def" ]
[ "day from now def save_and_get_str(): # SAVE all_orders = [] since = exchange.milliseconds()", "'takerOrMaker']) for element in all_orders: trade = element['info'] trade_utc = datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f')", "df = pd.DataFrame( columns=['utc', 'time', 'type', 'amount', 'price', 'fee', 'takerOrMaker']) for element in", "-1 day from now while since < exchange.milliseconds(): symbol = 'ETH/JPY' # change", "for your symbol limit = 100 # change for your limit orders =", "df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False) else: csv_content = df.to_csv( index=False, header=None)", "header=None) with open('transaction_liquid.csv', 'a') as csvfile: csvfile.write(csv_content) def sort_csv(): x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0])", "import pandas as pd from datetime import datetime, timedelta import operator import csv", "+= orders else: break df = pd.DataFrame( columns=['utc', 'time', 'type', 'amount', 'price', 'fee',", "',' + str(element['fee']) + ',' + str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if", "os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False) else: csv_content = df.to_csv( index=False, header=None) with open('transaction_liquid.csv', 'a')", "datetime import datetime, timedelta import operator import csv import cfg liquid = ccxt.liquid(cfg.liquid_misc_credential)", "exchange.milliseconds() - 86400000 * 5 # -1 day from now while since <", "your symbol limit = 100 # change for your limit orders = exchange.fetch_my_trades(symbol,", "str(abs( float(trade['quantity']))) + ',' + str(float(trade['price'])) + ',' + str(element['fee']) + ',' +", "= df.to_csv(index=False) else: csv_content = df.to_csv( index=False, header=None) with open('transaction_liquid.csv', 'a') as csvfile:", "= pd.DataFrame( columns=['utc', 
'time', 'type', 'amount', 'price', 'fee', 'takerOrMaker']) for element in all_orders:", "> 1: since = orders[len(orders) - 1]['timestamp'] all_orders += orders else: break df", "import os import pandas as pd from datetime import datetime, timedelta import operator", "'fee', 'takerOrMaker']) for element in all_orders: trade = element['info'] trade_utc = datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d", "since, limit) if len(orders) > 1: since = orders[len(orders) - 1]['timestamp'] all_orders +=", "exchange = liquid since = exchange.milliseconds() - 86400000 # -1 day from now", "from now def save_and_get_str(): # SAVE all_orders = [] since = exchange.milliseconds() -", "now def save_and_get_str(): # SAVE all_orders = [] since = exchange.milliseconds() - 86400000", "= exchange.milliseconds() - 86400000 # -1 day from now def save_and_get_str(): # SAVE", "[] since = exchange.milliseconds() - 86400000 * 5 # -1 day from now", "import datetime, timedelta import operator import csv import cfg liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange", "',' + str(trade['my_side']) + ',' + str(abs( float(trade['quantity']))) + ',' + str(float(trade['price'])) +", "trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False) else: csv_content = df.to_csv(", "since < exchange.milliseconds(): symbol = 'ETH/JPY' # change for your symbol limit =", "str(float(trade['price'])) + ',' + str(element['fee']) + ',' + str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\") #", "sort_csv(): x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x = x.drop_duplicates().sort_values('time', ascending=False) x.to_csv('transaction_liquid.csv', index=False) print('sorted') while", "+ str(element['fee']) + ',' + str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if not", "os 
import pandas as pd from datetime import datetime, timedelta import operator import", "exchange.milliseconds(): symbol = 'ETH/JPY' # change for your symbol limit = 100 #", "pd from datetime import datetime, timedelta import operator import csv import cfg liquid", "trade_utc = datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at']) * 1000)) + ',' +", "str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False) else:", "limit) if len(orders) > 1: since = orders[len(orders) - 1]['timestamp'] all_orders += orders", "exchange.fetch_my_trades(symbol, since, limit) if len(orders) > 1: since = orders[len(orders) - 1]['timestamp'] all_orders", "if not os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False) else: csv_content = df.to_csv( index=False, header=None) with", "all_orders += orders else: break df = pd.DataFrame( columns=['utc', 'time', 'type', 'amount', 'price',", "not os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False) else: csv_content = df.to_csv( index=False, header=None) with open('transaction_liquid.csv',", "',' + str(trade_utc) + ',' + str(trade['my_side']) + ',' + str(abs( float(trade['quantity']))) +", "',' + str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"): csv_content =", "symbol limit = 100 # change for your limit orders = exchange.fetch_my_trades(symbol, since,", "orders else: break df = pd.DataFrame( columns=['utc', 'time', 'type', 'amount', 'price', 'fee', 'takerOrMaker'])", "ccxt import time import os import pandas as pd from datetime import datetime,", "print(x.iloc[0]) x = x.drop_duplicates().sort_values('time', ascending=False) 
x.to_csv('transaction_liquid.csv', index=False) print('sorted') while True: save_and_get_str() sort_csv() time.sleep(23", "',' + str(float(trade['price'])) + ',' + str(element['fee']) + ',' + str(element['takerOrMaker']) df.loc[len(df.index)] =", "as pd from datetime import datetime, timedelta import operator import csv import cfg", "import operator import csv import cfg liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid since", "len(orders) > 1: since = orders[len(orders) - 1]['timestamp'] all_orders += orders else: break", "datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at']) * 1000)) + ',' + str(trade_utc) +", "+ ',' + str(trade['my_side']) + ',' + str(abs( float(trade['quantity']))) + ',' + str(float(trade['price']))", "df.to_csv(index=False) else: csv_content = df.to_csv( index=False, header=None) with open('transaction_liquid.csv', 'a') as csvfile: csvfile.write(csv_content)", "since = exchange.milliseconds() - 86400000 * 5 # -1 day from now while", "pd.DataFrame( columns=['utc', 'time', 'type', 'amount', 'price', 'fee', 'takerOrMaker']) for element in all_orders: trade", "+ ',' + str(float(trade['price'])) + ',' + str(element['fee']) + ',' + str(element['takerOrMaker']) df.loc[len(df.index)]", "open('transaction_liquid.csv', 'a') as csvfile: csvfile.write(csv_content) def sort_csv(): x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x =", "'type', 'amount', 'price', 'fee', 'takerOrMaker']) for element in all_orders: trade = element['info'] trade_utc", "from now while since < exchange.milliseconds(): symbol = 'ETH/JPY' # change for your", "= str(int(float(trade['created_at']) * 1000)) + ',' + str(trade_utc) + ',' + str(trade['my_side']) +", "all_orders = [] since = exchange.milliseconds() - 86400000 * 5 # -1 day", "= liquid since = exchange.milliseconds() - 86400000 # -1 day from now def", "def save_and_get_str(): # SAVE all_orders = [] since = 
exchange.milliseconds() - 86400000 *", "86400000 # -1 day from now def save_and_get_str(): # SAVE all_orders = []", "# change for your symbol limit = 100 # change for your limit", "%H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at']) * 1000)) + ',' + str(trade_utc) + ',' +", "= x.drop_duplicates().sort_values('time', ascending=False) x.to_csv('transaction_liquid.csv', index=False) print('sorted') while True: save_and_get_str() sort_csv() time.sleep(23 * 60)", "1: since = orders[len(orders) - 1]['timestamp'] all_orders += orders else: break df =", "limit = 100 # change for your limit orders = exchange.fetch_my_trades(symbol, since, limit)", "pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x = x.drop_duplicates().sort_values('time', ascending=False) x.to_csv('transaction_liquid.csv', index=False) print('sorted') while True: save_and_get_str() sort_csv()", "while since < exchange.milliseconds(): symbol = 'ETH/JPY' # change for your symbol limit", "csv_content = df.to_csv(index=False) else: csv_content = df.to_csv( index=False, header=None) with open('transaction_liquid.csv', 'a') as", "trade = element['info'] trade_utc = datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at']) * 1000))", "save_and_get_str(): # SAVE all_orders = [] since = exchange.milliseconds() - 86400000 * 5", "since = exchange.milliseconds() - 86400000 # -1 day from now def save_and_get_str(): #", "SAVE all_orders = [] since = exchange.milliseconds() - 86400000 * 5 # -1", "# SAVE all_orders = [] since = exchange.milliseconds() - 86400000 * 5 #", "df.to_csv( index=False, header=None) with open('transaction_liquid.csv', 'a') as csvfile: csvfile.write(csv_content) def sort_csv(): x =", "= exchange.fetch_my_trades(symbol, since, limit) if len(orders) > 1: since = orders[len(orders) - 1]['timestamp']", "* 1000)) + ',' + str(trade_utc) + ',' + str(trade['my_side']) + ',' +", "= orders[len(orders) 
- 1]['timestamp'] all_orders += orders else: break df = pd.DataFrame( columns=['utc',", "str(int(float(trade['created_at']) * 1000)) + ',' + str(trade_utc) + ',' + str(trade['my_side']) + ','", "x = x.drop_duplicates().sort_values('time', ascending=False) x.to_csv('transaction_liquid.csv', index=False) print('sorted') while True: save_and_get_str() sort_csv() time.sleep(23 *", "+ str(trade['my_side']) + ',' + str(abs( float(trade['quantity']))) + ',' + str(float(trade['price'])) + ','", "= df.to_csv( index=False, header=None) with open('transaction_liquid.csv', 'a') as csvfile: csvfile.write(csv_content) def sort_csv(): x", "change for your limit orders = exchange.fetch_my_trades(symbol, since, limit) if len(orders) > 1:", "+ str(float(trade['price'])) + ',' + str(element['fee']) + ',' + str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\")", "+ ',' + str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"): csv_content", "# change for your limit orders = exchange.fetch_my_trades(symbol, since, limit) if len(orders) >", "+ ',' + str(element['fee']) + ',' + str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv')", "day from now while since < exchange.milliseconds(): symbol = 'ETH/JPY' # change for", "x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x = x.drop_duplicates().sort_values('time', ascending=False) x.to_csv('transaction_liquid.csv', index=False) print('sorted') while True:", "float(trade['quantity']))) + ',' + str(float(trade['price'])) + ',' + str(element['fee']) + ',' + str(element['takerOrMaker'])", "timedelta import operator import csv import cfg liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid", "orders = exchange.fetch_my_trades(symbol, since, limit) if len(orders) > 1: since = orders[len(orders) -", "= 
element['info'] trade_utc = datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at']) * 1000)) +", "import time import os import pandas as pd from datetime import datetime, timedelta", "* 5 # -1 day from now while since < exchange.milliseconds(): symbol =", "float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at']) * 1000)) + ',' + str(trade_utc) + ','", "= 'ETH/JPY' # change for your symbol limit = 100 # change for", "# df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False) else: csv_content = df.to_csv( index=False,", "+ ',' + str(abs( float(trade['quantity']))) + ',' + str(float(trade['price'])) + ',' + str(element['fee'])", "csvfile.write(csv_content) def sort_csv(): x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x = x.drop_duplicates().sort_values('time', ascending=False) x.to_csv('transaction_liquid.csv', index=False)", "= [] since = exchange.milliseconds() - 86400000 * 5 # -1 day from", "else: break df = pd.DataFrame( columns=['utc', 'time', 'type', 'amount', 'price', 'fee', 'takerOrMaker']) for", "def sort_csv(): x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x = x.drop_duplicates().sort_values('time', ascending=False) x.to_csv('transaction_liquid.csv', index=False) print('sorted')", "= ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid since = exchange.milliseconds() - 86400000 # -1 day", "element['info'] trade_utc = datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at']) * 1000)) + ','", "your limit orders = exchange.fetch_my_trades(symbol, since, limit) if len(orders) > 1: since =", "since = orders[len(orders) - 1]['timestamp'] all_orders += orders else: break df = pd.DataFrame(", "trades_to_append = 
str(int(float(trade['created_at']) * 1000)) + ',' + str(trade_utc) + ',' + str(trade['my_side'])", "now while since < exchange.milliseconds(): symbol = 'ETH/JPY' # change for your symbol", "1000)) + ',' + str(trade_utc) + ',' + str(trade['my_side']) + ',' + str(abs(", "df.loc[len(df.index)] = trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False) else: csv_content", "symbol = 'ETH/JPY' # change for your symbol limit = 100 # change", "import ccxt import time import os import pandas as pd from datetime import", "for your limit orders = exchange.fetch_my_trades(symbol, since, limit) if len(orders) > 1: since", "-1 day from now def save_and_get_str(): # SAVE all_orders = [] since =", "else: csv_content = df.to_csv( index=False, header=None) with open('transaction_liquid.csv', 'a') as csvfile: csvfile.write(csv_content) def", "'price', 'fee', 'takerOrMaker']) for element in all_orders: trade = element['info'] trade_utc = datetime.utcfromtimestamp(", "import json import requests import ccxt import time import os import pandas as", "pandas as pd from datetime import datetime, timedelta import operator import csv import", "csv import cfg liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid since = exchange.milliseconds() -", "element in all_orders: trade = element['info'] trade_utc = datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append =", "+ str(trade_utc) + ',' + str(trade['my_side']) + ',' + str(abs( float(trade['quantity']))) + ','", "- 86400000 * 5 # -1 day from now while since < exchange.milliseconds():", "import requests import ccxt import time import os import pandas as pd from", "columns=['utc', 'time', 'type', 'amount', 'price', 'fee', 'takerOrMaker']) for element in all_orders: trade =", "all_orders: trade = element['info'] trade_utc = datetime.utcfromtimestamp( 
float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at']) *", "ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid since = exchange.milliseconds() - 86400000 # -1 day from", "json import requests import ccxt import time import os import pandas as pd", "limit orders = exchange.fetch_my_trades(symbol, since, limit) if len(orders) > 1: since = orders[len(orders)", "change for your symbol limit = 100 # change for your limit orders", "for element in all_orders: trade = element['info'] trade_utc = datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append", "import cfg liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid since = exchange.milliseconds() - 86400000", "exchange.milliseconds() - 86400000 # -1 day from now def save_and_get_str(): # SAVE all_orders", "import csv import cfg liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid since = exchange.milliseconds()", "- 1]['timestamp'] all_orders += orders else: break df = pd.DataFrame( columns=['utc', 'time', 'type',", "csv_content = df.to_csv( index=False, header=None) with open('transaction_liquid.csv', 'a') as csvfile: csvfile.write(csv_content) def sort_csv():", "str(trade_utc) + ',' + str(trade['my_side']) + ',' + str(abs( float(trade['quantity']))) + ',' +", "requests import ccxt import time import os import pandas as pd from datetime", "= exchange.milliseconds() - 86400000 * 5 # -1 day from now while since", "'amount', 'price', 'fee', 'takerOrMaker']) for element in all_orders: trade = element['info'] trade_utc =", "datetime, timedelta import operator import csv import cfg liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange =", "from datetime import datetime, timedelta import operator import csv import cfg liquid =", "index=False, header=None) with open('transaction_liquid.csv', 'a') as csvfile: csvfile.write(csv_content) def sort_csv(): x = 
pd.read_csv(\"transaction_liquid.csv\")", "100 # change for your limit orders = exchange.fetch_my_trades(symbol, since, limit) if len(orders)", "5 # -1 day from now while since < exchange.milliseconds(): symbol = 'ETH/JPY'", "+ str(abs( float(trade['quantity']))) + ',' + str(float(trade['price'])) + ',' + str(element['fee']) + ','", "# -1 day from now def save_and_get_str(): # SAVE all_orders = [] since", "as csvfile: csvfile.write(csv_content) def sort_csv(): x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x = x.drop_duplicates().sort_values('time', ascending=False)", "csvfile: csvfile.write(csv_content) def sort_csv(): x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x = x.drop_duplicates().sort_values('time', ascending=False) x.to_csv('transaction_liquid.csv',", "if len(orders) > 1: since = orders[len(orders) - 1]['timestamp'] all_orders += orders else:", "in all_orders: trade = element['info'] trade_utc = datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at'])", "= trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False) else: csv_content =", "+ str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"): csv_content = df.to_csv(index=False)", "with open('transaction_liquid.csv', 'a') as csvfile: csvfile.write(csv_content) def sort_csv(): x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x", "orders[len(orders) - 1]['timestamp'] all_orders += orders else: break df = pd.DataFrame( columns=['utc', 'time',", "'a') as csvfile: csvfile.write(csv_content) def sort_csv(): x = pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x = x.drop_duplicates().sort_values('time',", "86400000 * 5 # -1 day from now while since < 
exchange.milliseconds(): symbol", "< exchange.milliseconds(): symbol = 'ETH/JPY' # change for your symbol limit = 100", "= pd.read_csv(\"transaction_liquid.csv\") print(x.iloc[0]) x = x.drop_duplicates().sort_values('time', ascending=False) x.to_csv('transaction_liquid.csv', index=False) print('sorted') while True: save_and_get_str()", "+ ',' + str(trade_utc) + ',' + str(trade['my_side']) + ',' + str(abs( float(trade['quantity'])))", "str(trade['my_side']) + ',' + str(abs( float(trade['quantity']))) + ',' + str(float(trade['price'])) + ',' +", "# -1 day from now while since < exchange.milliseconds(): symbol = 'ETH/JPY' #", "operator import csv import cfg liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid since =", "= datetime.utcfromtimestamp( float(trade['created_at'])).strftime('%Y-%m-%d %H:%M:%S.%f') trades_to_append = str(int(float(trade['created_at']) * 1000)) + ',' + str(trade_utc)", "cfg liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid since = exchange.milliseconds() - 86400000 #", "',' + str(abs( float(trade['quantity']))) + ',' + str(float(trade['price'])) + ',' + str(element['fee']) +", "= 100 # change for your limit orders = exchange.fetch_my_trades(symbol, since, limit) if", "str(element['fee']) + ',' + str(element['takerOrMaker']) df.loc[len(df.index)] = trades_to_append.split(\",\") # df.to_csv('transaction_liquid.csv') if not os.path.isfile(\"transaction_liquid.csv\"):", "'ETH/JPY' # change for your symbol limit = 100 # change for your", "time import os import pandas as pd from datetime import datetime, timedelta import", "- 86400000 # -1 day from now def save_and_get_str(): # SAVE all_orders =", "liquid = ccxt.liquid(cfg.liquid_misc_credential) exchange = liquid since = exchange.milliseconds() - 86400000 # -1", "'time', 'type', 'amount', 'price', 'fee', 'takerOrMaker']) for element in all_orders: trade = element['info']", "liquid since = exchange.milliseconds() - 86400000 # -1 day from now def save_and_get_str():", 
"break df = pd.DataFrame( columns=['utc', 'time', 'type', 'amount', 'price', 'fee', 'takerOrMaker']) for element", "1]['timestamp'] all_orders += orders else: break df = pd.DataFrame( columns=['utc', 'time', 'type', 'amount'," ]
[ "datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\" parses a date time string and returns a date", "value, unit = match.groups() value = float(value) days, seconds = cls.delta_units[unit] rslt +=", "'', 0, '0' and 'now' -> datetime.now() - if fmt is passed same", "epoch T_EPOCH = datetime(1970, 1, 1, tzinfo=pytz.utc) DEFAULT = object() # singleton, for", "\"+-\" or ',' in datestr: return cls.strptimedelta(datestr, tzinfo) rslt = dateutil.parser.parse(datestr) if rslt.tzinfo", "strptime(cls, datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\" parses a date time string and returns a", "@classmethod def strptime(cls, datestr): pass class Date(DateTime): @classmethod def strptime(cls, datestr): pass def", "raise DateTimeError(\"can't parse %r as delta\" % field) value, unit = match.groups() value", "name to parse :param use_ctime: if file name contains no string use file's", "# and rounding (use by strptime) = # d for days # default", ":param fname: file name to parse :param use_ctime: if file name contains no", "# d for days # w for weeks # and rounding = #", "comma separated list of a start time and a end time \"\"\" if", "epoche) :param fname: file name to parse :param use_ctime: if file name contains", "time \"fnameYYYYMMDD-HH-mm-ss\" date and time \"fnameYYYYMMDD-ssssssssss\" date and time(in seconds since epoche) :param", "date \"fnameYYYY-MM-DD\" date with separators \"fnameYYYYMMDD_HHmmss\" date and time \"fnameYYYYMMDD-HHmmss\" date and time", "time string and returns a date time object Supported Formats: - formats as", "no tz info is specified in the string, then this param decides which", "parse %r as delta\" % field) value, unit = match.groups() value = float(value)", "'': (1, 0), # default unit = days } @classmethod def strptimedelta(cls, deltastr,", "(val.strip() for val in deltastr.split(',')) delta_rex = cls.delta_rex for field in fields: match", "= from_str if from_str else default_from to_str = to_str if to_str else default_to", "seconds h for hours M for 
minutes d for days w for weeks", "contains no string use file's mtime \"\"\" def to_timestamp(t): \"\"\" convert a datetime", "default_from to_str = default_to else: from_str, to_str = [v.strip() for v in rangestr.split(',',", "not implemented so far # and rounding (use by strptime) = # d", "else datestr if datestr in (None, '', '0', 'now'): return datetime.now(tzinfo) if datestr[:1]", "r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to handle comma separated list of deltas", "\"\"\" parses a date time string and returns a date time object Supported", "= datetime.strptime(datestr, fmt) else: if isinstance(datestr, (int, float)): datestr = str(datestr) datestr =", "no rounding tzinfo = tzinfo if tzinfo is not DEFAULT else tzlocal.get_localzone() if", "of a start time and a end time \"\"\" if rangestr is None:", "for days # default no rounding # \"\"\" # TODO: think about using", "specified in the string, then this param decides which time zone shall be", "\"\"\" # NOT IMPLEMENTED SO FAR # - delta format with +-num units[rounding],", "'+-<num><unit>' where unit = s for seconds h for hours M for minutes", "if rslt.tzinfo is None and tzinfo: rslt = tzinfo.localize(rslt) return rslt @classmethod def", "datestr if datestr in (None, '', '0', 'now'): return datetime.now(tzinfo) if datestr[:1] in", "time and a end time \"\"\" if rangestr is None: from_str = default_from", "cls.strptime(to_str) return t_from, t_to class Time(DateTime): @classmethod def strptime(cls, datestr): pass class Date(DateTime):", "return t_from, t_to class Time(DateTime): @classmethod def strptime(cls, datestr): pass class Date(DateTime): @classmethod", "t_to class Time(DateTime): @classmethod def strptime(cls, datestr): pass class Date(DateTime): @classmethod def strptime(cls,", "strptime(cls, datestr): pass def fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None): \"\"\" extracts date time from", "def 
strptime(cls, datestr): pass class Date(DateTime): @classmethod def strptime(cls, datestr): pass def fname_to_time(fname,", "and time(in seconds since epoche) :param fname: file name to parse :param use_ctime:", ":param fmt: if passedm then use datetime's normal strptime BUT add a time", "datetime timedelta object Supported Formats: '+-<num><unit>' where unit = s for seconds h", "% (single_delta, single_delta) delta_rex = re.compile('^' + single_delta + '$') delta_units = {", "\"\"\" parses a time range string a time range string is a comma", "date time string and returns a datetime timedelta object Supported Formats: '+-<num><unit>' where", "start time and a end time \"\"\" if rangestr is None: from_str =", "a datetime timedelta object Supported Formats: '+-<num><unit>' where unit = s for seconds", "and time \"fnameYYYYMMDD-ssssssssss\" date and time(in seconds since epoche) :param fname: file name", "field) value, unit = match.groups() value = float(value) days, seconds = cls.delta_units[unit] rslt", "datetime import datetime from datetime import timedelta import dateutil.parser import pytz import tzlocal", "class DateTime(object): single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to handle comma", "= datetime.now(pytz.utc) fields = (val.strip() for val in deltastr.split(',')) delta_rex = cls.delta_rex for", "use file's mtime \"\"\" def to_timestamp(t): \"\"\" convert a datetime object to seconds", "- formats as supported by dateutil.parser - None, '', 0, '0' and 'now'", "[v.strip() for v in rangestr.split(',', 1)] from_str = from_str if from_str else default_from", "3600), 'd': (1, 0), 'w': (7, 0), '': (1, 0), # default unit", "for days # default no rounding tzinfo = tzinfo if tzinfo is not", "DateTimeError(Exception): \"\"\" custom exception \"\"\" class DateTime(object): single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = 
r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)'", "else default_to t_from = cls.strptime(from_str) t_to = cls.strptime(to_str) return t_from, t_to class Time(DateTime):", "1)] from_str = from_str if from_str else default_from to_str = to_str if to_str", "rslt = datetime.now(pytz.utc) fields = (val.strip() for val in deltastr.split(',')) delta_rex = cls.delta_rex", "and time \"fnameYYYYMMDD-HHmmss\" date and time \"fnameYYYYMMDD-HH-mm-ss\" date and time \"fnameYYYYMMDD-ssssssssss\" date and", "object other: use other time zone \"\"\" # NOT IMPLEMENTED SO FAR #", "time string and returns a datetime timedelta object Supported Formats: '+-<num><unit>' where unit", "= re.compile('^' + single_delta + '$') delta_units = { 's': (0, 1), 'M':", "= days \"\"\" # not implemented so far # and rounding (use by", "match.groups() value = float(value) days, seconds = cls.delta_units[unit] rslt += timedelta(days * value,", "- None, '', 0, '0' and 'now' -> datetime.now() - if fmt is", "this param decides which time zone shall be used. DEFAULT: use local time", "return rslt @classmethod def parse_range(cls, rangestr=None, default_from='-1d', default_to='now'): \"\"\" parses a time range", "used. 
DEFAULT: use local time zone None: return naive time zone object other:", "time \"\"\" if rangestr is None: from_str = default_from to_str = default_to else:", "separators \"fnameYYYYMMDD_HHmmss\" date and time \"fnameYYYYMMDD-HHmmss\" date and time \"fnameYYYYMMDD-HH-mm-ss\" date and time", "days w for weeks default = days \"\"\" # not implemented so far", "in (None, '', '0', 'now'): return datetime.now(tzinfo) if datestr[:1] in \"+-\" or ','", "return rslt @classmethod def strptime(cls, datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\" parses a date time", "= str(datestr) datestr = datestr.strip() if datestr else datestr if datestr in (None,", "parse :param use_ctime: if file name contains no string use file's ctime :param", "rounding # \"\"\" # TODO: think about using dateutil.parser.relativedelta rslt = datetime.now(pytz.utc) fields", "None and tzinfo: rslt = tzinfo.localize(rslt) return rslt @classmethod def parse_range(cls, rangestr=None, default_from='-1d',", "= to_str if to_str else default_to t_from = cls.strptime(from_str) t_to = cls.strptime(to_str) return", "(use by strptime) = # d for days # default no rounding #", "file's ctime :param use_mtime: if file name contains no string use file's mtime", "passed same as datetime.strptime :param datestr: date string to be passed :param fmt:", "s for seconds h for hours M for minutes d for days w", "import datetime from datetime import timedelta import dateutil.parser import pytz import tzlocal #", "and tzinfo: rslt = tzinfo.localize(rslt) return rslt @classmethod def parse_range(cls, rangestr=None, default_from='-1d', default_to='now'):", "rslt += timedelta(days * value, seconds * value) return rslt @classmethod def strptime(cls,", "for args with default values class DateTimeError(Exception): \"\"\" custom exception \"\"\" class DateTime(object):", "custom exception \"\"\" class DateTime(object): single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = 
r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt", "by strptime) = # d for days # default no rounding # \"\"\"", "IMPLEMENTED SO FAR # - delta format with +-num units[rounding], # where unit", "datestr = str(datestr) datestr = datestr.strip() if datestr else datestr if datestr in", "parse_range(cls, rangestr=None, default_from='-1d', default_to='now'): \"\"\" parses a time range string a time range", "+ '$') delta_units = { 's': (0, 1), 'M': (0, 60), 'h': (0,", "datestr = datestr.strip() if datestr else datestr if datestr in (None, '', '0',", "= dateutil.parser.parse(datestr) if rslt.tzinfo is None and tzinfo: rslt = tzinfo.localize(rslt) return rslt", "local time zone None: return naive time zone object other: use other time", "time(in seconds since epoche) :param fname: file name to parse :param use_ctime: if", "# and rounding = # d for days # default no rounding tzinfo", "\"fnameYYYYMMDD-ssssssssss\" date and time(in seconds since epoche) :param fname: file name to parse", "\"fnameYYYYMMDD-HHmmss\" date and time \"fnameYYYYMMDD-HH-mm-ss\" date and time \"fnameYYYYMMDD-ssssssssss\" date and time(in seconds", "TODO: think about using dateutil.parser.relativedelta rslt = datetime.now(pytz.utc) fields = (val.strip() for val", "default_from to_str = to_str if to_str else default_to t_from = cls.strptime(from_str) t_to =", "dateutil.parser import pytz import tzlocal # datetime objct for beginning of epoch T_EPOCH", "for weeks # and rounding = # d for days # default no", "minutes # h for hours # d for days # w for weeks", "be passed :param fmt: if passedm then use datetime's normal strptime BUT add", "@classmethod def strptime(cls, datestr): pass def fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None): \"\"\" extracts date", "of supported formats: \"fnameYYYYMMDD\" just a date \"fnameYYYY-MM-DD\" date with separators \"fnameYYYYMMDD_HHmmss\" date", "d for days w for weeks default = days \"\"\" # not implemented", "\"\"\" parses a date time 
string and returns a datetime timedelta object Supported", "datetime.now(tzinfo) if datestr[:1] in \"+-\" or ',' in datestr: return cls.strptimedelta(datestr, tzinfo) rslt", "is None and tzinfo: rslt = tzinfo.localize(rslt) return rslt @classmethod def parse_range(cls, rangestr=None,", "use local time zone None: return naive time zone object other: use other", "objct for beginning of epoch T_EPOCH = datetime(1970, 1, 1, tzinfo=pytz.utc) DEFAULT =", "float(value) days, seconds = cls.delta_units[unit] rslt += timedelta(days * value, seconds * value)", "from_str = from_str if from_str else default_from to_str = to_str if to_str else", "contains no string use file's ctime :param use_mtime: if file name contains no", "then use datetime's normal strptime BUT add a time zone info :param tzinfo:", "= s for seconds h for hours M for minutes d for days", "@classmethod def parse_range(cls, rangestr=None, default_from='-1d', default_to='now'): \"\"\" parses a time range string a", "float)): datestr = str(datestr) datestr = datestr.strip() if datestr else datestr if datestr", "if passedm then use datetime's normal strptime BUT add a time zone info", "from_str else default_from to_str = to_str if to_str else default_to t_from = cls.strptime(from_str)", "* value) return rslt @classmethod def strptime(cls, datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\" parses a", "default values class DateTimeError(Exception): \"\"\" custom exception \"\"\" class DateTime(object): single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)'", "rslt @classmethod def strptime(cls, datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\" parses a date time string", "and 'now' -> datetime.now() - if fmt is passed same as datetime.strptime :param", "with separators \"fnameYYYYMMDD_HHmmss\" date and time \"fnameYYYYMMDD-HHmmss\" date and time \"fnameYYYYMMDD-HH-mm-ss\" date and", "fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None): \"\"\" extracts date time from an fname examples of", 
"(int, float)): datestr = str(datestr) datestr = datestr.strip() if datestr else datestr if", "tzinfo is not DEFAULT else tzlocal.get_localzone() if fmt: rslt = datetime.strptime(datestr, fmt) else:", "to be passed :param fmt: if passedm then use datetime's normal strptime BUT", "s for seconds # M for minutes # h for hours # d", "a time range string is a comma separated list of a start time", "datetime objct for beginning of epoch T_EPOCH = datetime(1970, 1, 1, tzinfo=pytz.utc) DEFAULT", "fields = (val.strip() for val in deltastr.split(',')) delta_rex = cls.delta_rex for field in", "\"fnameYYYY-MM-DD\" date with separators \"fnameYYYYMMDD_HHmmss\" date and time \"fnameYYYYMMDD-HHmmss\" date and time \"fnameYYYYMMDD-HH-mm-ss\"", "tz info is specified in the string, then this param decides which time", "date string to be passed :param fmt: if passedm then use datetime's normal", "\"\"\" custom exception \"\"\" class DateTime(object): single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' #", "parses a time range string a time range string is a comma separated", "else: if isinstance(datestr, (int, float)): datestr = str(datestr) datestr = datestr.strip() if datestr", "default_from='-1d', default_to='now'): \"\"\" parses a time range string a time range string is", "and a end time \"\"\" if rangestr is None: from_str = default_from to_str", "rangestr.split(',', 1)] from_str = from_str if from_str else default_from to_str = to_str if", "use_mtime=False, tz=None): \"\"\" extracts date time from an fname examples of supported formats:", "DEFAULT = object() # singleton, for args with default values class DateTimeError(Exception): \"\"\"", "is passed same as datetime.strptime :param datestr: date string to be passed :param", "pass def fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None): \"\"\" extracts date time from an fname", "datestr else datestr if datestr in (None, '', '0', 
'now'): return datetime.now(tzinfo) if", "def strptime(cls, datestr): pass def fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None): \"\"\" extracts date time", "(single_delta, single_delta) delta_rex = re.compile('^' + single_delta + '$') delta_units = { 's':", "else default_from to_str = to_str if to_str else default_to t_from = cls.strptime(from_str) t_to", "'w': (7, 0), '': (1, 0), # default unit = days } @classmethod", "@classmethod def strptime(cls, datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\" parses a date time string and", "string is a comma separated list of a start time and a end", "datetime's normal strptime BUT add a time zone info :param tzinfo: if no", "using dateutil.parser.relativedelta rslt = datetime.now(pytz.utc) fields = (val.strip() for val in deltastr.split(',')) delta_rex", "to handle comma separated list of deltas # multi_delta = r'^%s(?:,%s)*$' % (single_delta,", "\"\"\" if rangestr is None: from_str = default_from to_str = default_to else: from_str,", "and time \"fnameYYYYMMDD-HH-mm-ss\" date and time \"fnameYYYYMMDD-ssssssssss\" date and time(in seconds since epoche)", "return cls.strptimedelta(datestr, tzinfo) rslt = dateutil.parser.parse(datestr) if rslt.tzinfo is None and tzinfo: rslt", "units[rounding], # where unit = # s for seconds # M for minutes", "deltastr.split(',')) delta_rex = cls.delta_rex for field in fields: match = delta_rex.match(field) if not", "extracts date time from an fname examples of supported formats: \"fnameYYYYMMDD\" just a", "datestr: return cls.strptimedelta(datestr, tzinfo) rslt = dateutil.parser.parse(datestr) if rslt.tzinfo is None and tzinfo:", "# - delta format with +-num units[rounding], # where unit = # s", "passed :param fmt: if passedm then use datetime's normal strptime BUT add a", "cls.delta_rex for field in fields: match = delta_rex.match(field) if not match: raise DateTimeError(\"can't", "(0, 1), 'M': (0, 60), 'h': (0, 3600), 'd': (1, 0), 'w': (7,", "(7, 0), '': (1, 0), # default 
unit = days } @classmethod def", "datestr in (None, '', '0', 'now'): return datetime.now(tzinfo) if datestr[:1] in \"+-\" or", "for field in fields: match = delta_rex.match(field) if not match: raise DateTimeError(\"can't parse", "t_to = cls.strptime(to_str) return t_from, t_to class Time(DateTime): @classmethod def strptime(cls, datestr): pass", "h for hours # d for days # w for weeks # and", "args with default values class DateTimeError(Exception): \"\"\" custom exception \"\"\" class DateTime(object): single_delta", "a date time object Supported Formats: - formats as supported by dateutil.parser -", "= cls.strptime(to_str) return t_from, t_to class Time(DateTime): @classmethod def strptime(cls, datestr): pass class", "exception \"\"\" class DateTime(object): single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to", "# TODO: think about using dateutil.parser.relativedelta rslt = datetime.now(pytz.utc) fields = (val.strip() for", "NOT IMPLEMENTED SO FAR # - delta format with +-num units[rounding], # where", "= # d for days # default no rounding tzinfo = tzinfo if", "return naive time zone object other: use other time zone \"\"\" # NOT", "'M': (0, 60), 'h': (0, 3600), 'd': (1, 0), 'w': (7, 0), '':", "since epoche) :param fname: file name to parse :param use_ctime: if file name", "file name to parse :param use_ctime: if file name contains no string use", "for val in deltastr.split(',')) delta_rex = cls.delta_rex for field in fields: match =", "in the string, then this param decides which time zone shall be used.", "60), 'h': (0, 3600), 'd': (1, 0), 'w': (7, 0), '': (1, 0),", "M for minutes # h for hours # d for days # w", "shall be used. 
DEFAULT: use local time zone None: return naive time zone", "naive time zone object other: use other time zone \"\"\" # NOT IMPLEMENTED", "zone \"\"\" # NOT IMPLEMENTED SO FAR # - delta format with +-num", "rounding = # d for days # default no rounding tzinfo = tzinfo", "Formats: - formats as supported by dateutil.parser - None, '', 0, '0' and", "singleton, for args with default values class DateTimeError(Exception): \"\"\" custom exception \"\"\" class", "tzinfo) rslt = dateutil.parser.parse(datestr) if rslt.tzinfo is None and tzinfo: rslt = tzinfo.localize(rslt)", "for hours M for minutes d for days w for weeks default =", "\"\"\" # TODO: think about using dateutil.parser.relativedelta rslt = datetime.now(pytz.utc) fields = (val.strip()", "supported by dateutil.parser - None, '', 0, '0' and 'now' -> datetime.now() -", "re.compile('^' + single_delta + '$') delta_units = { 's': (0, 1), 'M': (0,", "string a time range string is a comma separated list of a start", "\"\"\" # not implemented so far # and rounding (use by strptime) =", "same as datetime.strptime :param datestr: date string to be passed :param fmt: if", "file's mtime \"\"\" def to_timestamp(t): \"\"\" convert a datetime object to seconds since", "info=None, raise_on_error=True): \"\"\" parses a date time string and returns a datetime timedelta", "range string a time range string is a comma separated list of a", "single_delta + '$') delta_units = { 's': (0, 1), 'M': (0, 60), 'h':", "0), # default unit = days } @classmethod def strptimedelta(cls, deltastr, info=None, raise_on_error=True):", "= tzinfo if tzinfo is not DEFAULT else tzlocal.get_localzone() if fmt: rslt =", "a comma separated list of a start time and a end time \"\"\"", "object Supported Formats: - formats as supported by dateutil.parser - None, '', 0,", "where unit = # s for seconds # M for minutes # h", "weeks # and rounding = # d for days # default no rounding", "fmt: rslt = datetime.strptime(datestr, fmt) else: if isinstance(datestr, (int, 
float)): datestr = str(datestr)", "tzlocal # datetime objct for beginning of epoch T_EPOCH = datetime(1970, 1, 1,", "in deltastr.split(',')) delta_rex = cls.delta_rex for field in fields: match = delta_rex.match(field) if", "a time zone info :param tzinfo: if no tz info is specified in", "for seconds # M for minutes # h for hours # d for", "1), 'M': (0, 60), 'h': (0, 3600), 'd': (1, 0), 'w': (7, 0),", "tzlocal.get_localzone() if fmt: rslt = datetime.strptime(datestr, fmt) else: if isinstance(datestr, (int, float)): datestr", "d for days # default no rounding tzinfo = tzinfo if tzinfo is", "no string use file's mtime \"\"\" def to_timestamp(t): \"\"\" convert a datetime object", "days \"\"\" # not implemented so far # and rounding (use by strptime)", "# s for seconds # M for minutes # h for hours #", "rslt.tzinfo is None and tzinfo: rslt = tzinfo.localize(rslt) return rslt @classmethod def parse_range(cls,", ":param use_ctime: if file name contains no string use file's ctime :param use_mtime:", "if file name contains no string use file's mtime \"\"\" def to_timestamp(t): \"\"\"", "beginning of epoch T_EPOCH = datetime(1970, 1, 1, tzinfo=pytz.utc) DEFAULT = object() #", "comma separated list of deltas # multi_delta = r'^%s(?:,%s)*$' % (single_delta, single_delta) delta_rex", "attempt to handle comma separated list of deltas # multi_delta = r'^%s(?:,%s)*$' %", "rslt = datetime.strptime(datestr, fmt) else: if isinstance(datestr, (int, float)): datestr = str(datestr) datestr", "time from an fname examples of supported formats: \"fnameYYYYMMDD\" just a date \"fnameYYYY-MM-DD\"", "datetime.now() - if fmt is passed same as datetime.strptime :param datestr: date string", "date and time \"fnameYYYYMMDD-ssssssssss\" date and time(in seconds since epoche) :param fname: file", "to parse :param use_ctime: if file name contains no string use file's ctime", "fields: match = delta_rex.match(field) if not match: raise DateTimeError(\"can't parse %r as delta\"", "single_delta = 
r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to handle comma separated list of deltas #", "string and returns a date time object Supported Formats: - formats as supported", "datetime.now(pytz.utc) fields = (val.strip() for val in deltastr.split(',')) delta_rex = cls.delta_rex for field", "datetime import timedelta import dateutil.parser import pytz import tzlocal # datetime objct for", "value, seconds * value) return rslt @classmethod def strptime(cls, datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\"", "if datestr in (None, '', '0', 'now'): return datetime.now(tzinfo) if datestr[:1] in \"+-\"", "to_str = [v.strip() for v in rangestr.split(',', 1)] from_str = from_str if from_str", "# d for days # default no rounding # \"\"\" # TODO: think", "file name contains no string use file's mtime \"\"\" def to_timestamp(t): \"\"\" convert", "FAR # - delta format with +-num units[rounding], # where unit = #", "rounding (use by strptime) = # d for days # default no rounding", "be used. DEFAULT: use local time zone None: return naive time zone object", "strptimedelta(cls, deltastr, info=None, raise_on_error=True): \"\"\" parses a date time string and returns a", "= cls.delta_rex for field in fields: match = delta_rex.match(field) if not match: raise", "info is specified in the string, then this param decides which time zone", "default no rounding # \"\"\" # TODO: think about using dateutil.parser.relativedelta rslt =", "v in rangestr.split(',', 1)] from_str = from_str if from_str else default_from to_str =", "pass class Date(DateTime): @classmethod def strptime(cls, datestr): pass def fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None):", "def strptimedelta(cls, deltastr, info=None, raise_on_error=True): \"\"\" parses a date time string and returns", "time zone shall be used. 
DEFAULT: use local time zone None: return naive", "values class DateTimeError(Exception): \"\"\" custom exception \"\"\" class DateTime(object): single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta", "r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to handle comma separated list of deltas # multi_delta =", "# h for hours # d for days # w for weeks #", "in rangestr.split(',', 1)] from_str = from_str if from_str else default_from to_str = to_str", "time zone info :param tzinfo: if no tz info is specified in the", "deltastr, info=None, raise_on_error=True): \"\"\" parses a date time string and returns a datetime", "is None: from_str = default_from to_str = default_to else: from_str, to_str = [v.strip()", "rslt @classmethod def parse_range(cls, rangestr=None, default_from='-1d', default_to='now'): \"\"\" parses a time range string", "import re from datetime import datetime from datetime import timedelta import dateutil.parser import", "(None, '', '0', 'now'): return datetime.now(tzinfo) if datestr[:1] in \"+-\" or ',' in", "# attempt to handle comma separated list of deltas # multi_delta = r'^%s(?:,%s)*$'", "from datetime import timedelta import dateutil.parser import pytz import tzlocal # datetime objct", "if rangestr is None: from_str = default_from to_str = default_to else: from_str, to_str", "string, then this param decides which time zone shall be used. 
DEFAULT: use", "list of a start time and a end time \"\"\" if rangestr is", "with default values class DateTimeError(Exception): \"\"\" custom exception \"\"\" class DateTime(object): single_delta =", "with +-num units[rounding], # where unit = # s for seconds # M", "# NOT IMPLEMENTED SO FAR # - delta format with +-num units[rounding], #", "(1, 0), 'w': (7, 0), '': (1, 0), # default unit = days", "just a date \"fnameYYYY-MM-DD\" date with separators \"fnameYYYYMMDD_HHmmss\" date and time \"fnameYYYYMMDD-HHmmss\" date", "timedelta import dateutil.parser import pytz import tzlocal # datetime objct for beginning of", "the string, then this param decides which time zone shall be used. DEFAULT:", ":param use_mtime: if file name contains no string use file's mtime \"\"\" def", "import tzlocal # datetime objct for beginning of epoch T_EPOCH = datetime(1970, 1,", "for v in rangestr.split(',', 1)] from_str = from_str if from_str else default_from to_str", "# \"\"\" # TODO: think about using dateutil.parser.relativedelta rslt = datetime.now(pytz.utc) fields =", "fname examples of supported formats: \"fnameYYYYMMDD\" just a date \"fnameYYYY-MM-DD\" date with separators", "delta format with +-num units[rounding], # where unit = # s for seconds", "single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to handle comma separated list", "0, '0' and 'now' -> datetime.now() - if fmt is passed same as", "'now' -> datetime.now() - if fmt is passed same as datetime.strptime :param datestr:", "+ single_delta + '$') delta_units = { 's': (0, 1), 'M': (0, 60),", "is a comma separated list of a start time and a end time", "to_timestamp(t): \"\"\" convert a datetime object to seconds since epoch \"\"\" return (t", "None: from_str = default_from to_str = default_to else: from_str, to_str = [v.strip() for", "a date \"fnameYYYY-MM-DD\" date with separators \"fnameYYYYMMDD_HHmmss\" date and time 
\"fnameYYYYMMDD-HHmmss\" date and", "from_str = default_from to_str = default_to else: from_str, to_str = [v.strip() for v", "def parse_range(cls, rangestr=None, default_from='-1d', default_to='now'): \"\"\" parses a time range string a time", "\"fnameYYYYMMDD_HHmmss\" date and time \"fnameYYYYMMDD-HHmmss\" date and time \"fnameYYYYMMDD-HH-mm-ss\" date and time \"fnameYYYYMMDD-ssssssssss\"", "for days w for weeks default = days \"\"\" # not implemented so", "hours M for minutes d for days w for weeks default = days", "+-num units[rounding], # where unit = # s for seconds # M for", "DateTime(object): single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to handle comma separated", "datestr[:1] in \"+-\" or ',' in datestr: return cls.strptimedelta(datestr, tzinfo) rslt = dateutil.parser.parse(datestr)", "if fmt is passed same as datetime.strptime :param datestr: date string to be", "isinstance(datestr, (int, float)): datestr = str(datestr) datestr = datestr.strip() if datestr else datestr", "by dateutil.parser - None, '', 0, '0' and 'now' -> datetime.now() - if", "parses a date time string and returns a datetime timedelta object Supported Formats:", "tzinfo.localize(rslt) return rslt @classmethod def parse_range(cls, rangestr=None, default_from='-1d', default_to='now'): \"\"\" parses a time", "None: return naive time zone object other: use other time zone \"\"\" #", "0), '': (1, 0), # default unit = days } @classmethod def strptimedelta(cls,", "or ',' in datestr: return cls.strptimedelta(datestr, tzinfo) rslt = dateutil.parser.parse(datestr) if rslt.tzinfo is", "\"fnameYYYYMMDD\" just a date \"fnameYYYY-MM-DD\" date with separators \"fnameYYYYMMDD_HHmmss\" date and time \"fnameYYYYMMDD-HHmmss\"", "M for minutes d for days w for weeks default = days \"\"\"", "far # and rounding (use by strptime) = # d for days #", "seconds * value) return rslt @classmethod def strptime(cls, 
datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\" parses", "time \"fnameYYYYMMDD-HHmmss\" date and time \"fnameYYYYMMDD-HH-mm-ss\" date and time \"fnameYYYYMMDD-ssssssssss\" date and time(in", "Time(DateTime): @classmethod def strptime(cls, datestr): pass class Date(DateTime): @classmethod def strptime(cls, datestr): pass", "time \"fnameYYYYMMDD-ssssssssss\" date and time(in seconds since epoche) :param fname: file name to", "'d': (1, 0), 'w': (7, 0), '': (1, 0), # default unit =", "# singleton, for args with default values class DateTimeError(Exception): \"\"\" custom exception \"\"\"", "delta_rex = re.compile('^' + single_delta + '$') delta_units = { 's': (0, 1),", "normal strptime BUT add a time zone info :param tzinfo: if no tz", "(0, 3600), 'd': (1, 0), 'w': (7, 0), '': (1, 0), # default", "w for weeks default = days \"\"\" # not implemented so far #", "unit = match.groups() value = float(value) days, seconds = cls.delta_units[unit] rslt += timedelta(days", "= (val.strip() for val in deltastr.split(',')) delta_rex = cls.delta_rex for field in fields:", ":param tzinfo: if no tz info is specified in the string, then this", "separated list of deltas # multi_delta = r'^%s(?:,%s)*$' % (single_delta, single_delta) delta_rex =", "hours # d for days # w for weeks # and rounding =", "supported formats: \"fnameYYYYMMDD\" just a date \"fnameYYYY-MM-DD\" date with separators \"fnameYYYYMMDD_HHmmss\" date and", "to_str = to_str if to_str else default_to t_from = cls.strptime(from_str) t_to = cls.strptime(to_str)", "unit = s for seconds h for hours M for minutes d for", "@classmethod def strptimedelta(cls, deltastr, info=None, raise_on_error=True): \"\"\" parses a date time string and", "delta_units = { 's': (0, 1), 'M': (0, 60), 'h': (0, 3600), 'd':", "-> datetime.now() - if fmt is passed same as datetime.strptime :param datestr: date", "# default no rounding # \"\"\" # TODO: think about using dateutil.parser.relativedelta rslt", "match = delta_rex.match(field) if not match: 
raise DateTimeError(\"can't parse %r as delta\" %", "time zone object other: use other time zone \"\"\" # NOT IMPLEMENTED SO", "for minutes # h for hours # d for days # w for", "for minutes d for days w for weeks default = days \"\"\" #", "name contains no string use file's ctime :param use_mtime: if file name contains", "tzinfo=pytz.utc) DEFAULT = object() # singleton, for args with default values class DateTimeError(Exception):", "= datetime(1970, 1, 1, tzinfo=pytz.utc) DEFAULT = object() # singleton, for args with", "of deltas # multi_delta = r'^%s(?:,%s)*$' % (single_delta, single_delta) delta_rex = re.compile('^' +", "if to_str else default_to t_from = cls.strptime(from_str) t_to = cls.strptime(to_str) return t_from, t_to", "',' in datestr: return cls.strptimedelta(datestr, tzinfo) rslt = dateutil.parser.parse(datestr) if rslt.tzinfo is None", "from datetime import datetime from datetime import timedelta import dateutil.parser import pytz import", "separated list of a start time and a end time \"\"\" if rangestr", "date and time(in seconds since epoche) :param fname: file name to parse :param", "field in fields: match = delta_rex.match(field) if not match: raise DateTimeError(\"can't parse %r", "datetime.strptime :param datestr: date string to be passed :param fmt: if passedm then", "not DEFAULT else tzlocal.get_localzone() if fmt: rslt = datetime.strptime(datestr, fmt) else: if isinstance(datestr,", "datestr): pass class Date(DateTime): @classmethod def strptime(cls, datestr): pass def fname_to_time(fname, use_ctime=False, use_mtime=False,", "unit = days } @classmethod def strptimedelta(cls, deltastr, info=None, raise_on_error=True): \"\"\" parses a", "(0, 60), 'h': (0, 3600), 'd': (1, 0), 'w': (7, 0), '': (1,", "from an fname examples of supported formats: \"fnameYYYYMMDD\" just a date \"fnameYYYY-MM-DD\" date", "for days # w for weeks # and rounding = # d for", "1, 1, tzinfo=pytz.utc) DEFAULT = object() # singleton, for args with default values", "returns a 
datetime timedelta object Supported Formats: '+-<num><unit>' where unit = s for", "default = days \"\"\" # not implemented so far # and rounding (use", "to_str if to_str else default_to t_from = cls.strptime(from_str) t_to = cls.strptime(to_str) return t_from,", "if not match: raise DateTimeError(\"can't parse %r as delta\" % field) value, unit", "use_mtime: if file name contains no string use file's mtime \"\"\" def to_timestamp(t):", "= object() # singleton, for args with default values class DateTimeError(Exception): \"\"\" custom", "zone info :param tzinfo: if no tz info is specified in the string,", "for weeks default = days \"\"\" # not implemented so far # and", "date time string and returns a date time object Supported Formats: - formats", "and returns a date time object Supported Formats: - formats as supported by", "seconds # M for minutes # h for hours # d for days", "convert a datetime object to seconds since epoch \"\"\" return (t - T_EPOCH).total_seconds()", "rangestr is None: from_str = default_from to_str = default_to else: from_str, to_str =", "then this param decides which time zone shall be used. 
DEFAULT: use local", "'h': (0, 3600), 'd': (1, 0), 'w': (7, 0), '': (1, 0), #", "class Time(DateTime): @classmethod def strptime(cls, datestr): pass class Date(DateTime): @classmethod def strptime(cls, datestr):", "examples of supported formats: \"fnameYYYYMMDD\" just a date \"fnameYYYY-MM-DD\" date with separators \"fnameYYYYMMDD_HHmmss\"", "rslt = tzinfo.localize(rslt) return rslt @classmethod def parse_range(cls, rangestr=None, default_from='-1d', default_to='now'): \"\"\" parses", "string use file's mtime \"\"\" def to_timestamp(t): \"\"\" convert a datetime object to", "handle comma separated list of deltas # multi_delta = r'^%s(?:,%s)*$' % (single_delta, single_delta)", "rslt = dateutil.parser.parse(datestr) if rslt.tzinfo is None and tzinfo: rslt = tzinfo.localize(rslt) return", "\"\"\" convert a datetime object to seconds since epoch \"\"\" return (t -", "fmt=None, tzinfo=DEFAULT): \"\"\" parses a date time string and returns a date time", "parses a date time string and returns a date time object Supported Formats:", "t_from, t_to class Time(DateTime): @classmethod def strptime(cls, datestr): pass class Date(DateTime): @classmethod def", "= r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to handle comma separated list of deltas # multi_delta", "# M for minutes # h for hours # d for days #", "list of deltas # multi_delta = r'^%s(?:,%s)*$' % (single_delta, single_delta) delta_rex = re.compile('^'", "Date(DateTime): @classmethod def strptime(cls, datestr): pass def fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None): \"\"\" extracts", "default_to t_from = cls.strptime(from_str) t_to = cls.strptime(to_str) return t_from, t_to class Time(DateTime): @classmethod", "'', '0', 'now'): return datetime.now(tzinfo) if datestr[:1] in \"+-\" or ',' in datestr:", "multi_delta = r'^%s(?:,%s)*$' % (single_delta, single_delta) delta_rex = re.compile('^' + single_delta + '$')", "use_ctime: if file name contains no string use file's ctime :param 
use_mtime: if", "and rounding (use by strptime) = # d for days # default no", "and rounding = # d for days # default no rounding tzinfo =", "if no tz info is specified in the string, then this param decides", "so far # and rounding (use by strptime) = # d for days", "= match.groups() value = float(value) days, seconds = cls.delta_units[unit] rslt += timedelta(days *", "d for days # w for weeks # and rounding = # d", "str(datestr) datestr = datestr.strip() if datestr else datestr if datestr in (None, '',", "None, '', 0, '0' and 'now' -> datetime.now() - if fmt is passed", "if datestr else datestr if datestr in (None, '', '0', 'now'): return datetime.now(tzinfo)", "= float(value) days, seconds = cls.delta_units[unit] rslt += timedelta(days * value, seconds *", "(1, 0), # default unit = days } @classmethod def strptimedelta(cls, deltastr, info=None,", "strptime(cls, datestr): pass class Date(DateTime): @classmethod def strptime(cls, datestr): pass def fname_to_time(fname, use_ctime=False,", "%r as delta\" % field) value, unit = match.groups() value = float(value) days,", "days # default no rounding tzinfo = tzinfo if tzinfo is not DEFAULT", "end time \"\"\" if rangestr is None: from_str = default_from to_str = default_to", "import timedelta import dateutil.parser import pytz import tzlocal # datetime objct for beginning", "1, tzinfo=pytz.utc) DEFAULT = object() # singleton, for args with default values class", "* value, seconds * value) return rslt @classmethod def strptime(cls, datestr=None, fmt=None, tzinfo=DEFAULT):", "format with +-num units[rounding], # where unit = # s for seconds #", "datestr): pass def fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None): \"\"\" extracts date time from an", "dateutil.parser.relativedelta rslt = datetime.now(pytz.utc) fields = (val.strip() for val in deltastr.split(',')) delta_rex =", "fmt) else: if isinstance(datestr, (int, float)): datestr = str(datestr) datestr = datestr.strip() if", "no rounding # \"\"\" # TODO: 
think about using dateutil.parser.relativedelta rslt = datetime.now(pytz.utc)", "= [v.strip() for v in rangestr.split(',', 1)] from_str = from_str if from_str else", "string use file's ctime :param use_mtime: if file name contains no string use", "DEFAULT else tzlocal.get_localzone() if fmt: rslt = datetime.strptime(datestr, fmt) else: if isinstance(datestr, (int,", "DEFAULT: use local time zone None: return naive time zone object other: use", "= r'^%s(?:,%s)*$' % (single_delta, single_delta) delta_rex = re.compile('^' + single_delta + '$') delta_units", "if file name contains no string use file's ctime :param use_mtime: if file", "name contains no string use file's mtime \"\"\" def to_timestamp(t): \"\"\" convert a", "for beginning of epoch T_EPOCH = datetime(1970, 1, 1, tzinfo=pytz.utc) DEFAULT = object()", "as datetime.strptime :param datestr: date string to be passed :param fmt: if passedm", "'0', 'now'): return datetime.now(tzinfo) if datestr[:1] in \"+-\" or ',' in datestr: return", "tzinfo: rslt = tzinfo.localize(rslt) return rslt @classmethod def parse_range(cls, rangestr=None, default_from='-1d', default_to='now'): \"\"\"", "\"\"\" extracts date time from an fname examples of supported formats: \"fnameYYYYMMDD\" just", "h for hours M for minutes d for days w for weeks default", "= tzinfo.localize(rslt) return rslt @classmethod def parse_range(cls, rangestr=None, default_from='-1d', default_to='now'): \"\"\" parses a", "formats: \"fnameYYYYMMDD\" just a date \"fnameYYYY-MM-DD\" date with separators \"fnameYYYYMMDD_HHmmss\" date and time", "weeks default = days \"\"\" # not implemented so far # and rounding", "# multi_delta = r'^%s(?:,%s)*$' % (single_delta, single_delta) delta_rex = re.compile('^' + single_delta +", "and returns a datetime timedelta object Supported Formats: '+-<num><unit>' where unit = s", "of epoch T_EPOCH = datetime(1970, 1, 1, tzinfo=pytz.utc) DEFAULT = object() # singleton,", "delta\" % field) value, unit = match.groups() value = 
float(value) days, seconds =", "an fname examples of supported formats: \"fnameYYYYMMDD\" just a date \"fnameYYYY-MM-DD\" date with", "raise_on_error=True): \"\"\" parses a date time string and returns a datetime timedelta object", "'s': (0, 1), 'M': (0, 60), 'h': (0, 3600), 'd': (1, 0), 'w':", "about using dateutil.parser.relativedelta rslt = datetime.now(pytz.utc) fields = (val.strip() for val in deltastr.split(','))", "time range string a time range string is a comma separated list of", "= default_to else: from_str, to_str = [v.strip() for v in rangestr.split(',', 1)] from_str", "a end time \"\"\" if rangestr is None: from_str = default_from to_str =", "returns a date time object Supported Formats: - formats as supported by dateutil.parser", "if fmt: rslt = datetime.strptime(datestr, fmt) else: if isinstance(datestr, (int, float)): datestr =", "= cls.delta_units[unit] rslt += timedelta(days * value, seconds * value) return rslt @classmethod", "datetime(1970, 1, 1, tzinfo=pytz.utc) DEFAULT = object() # singleton, for args with default", "T_EPOCH = datetime(1970, 1, 1, tzinfo=pytz.utc) DEFAULT = object() # singleton, for args", "strptime BUT add a time zone info :param tzinfo: if no tz info", "else: from_str, to_str = [v.strip() for v in rangestr.split(',', 1)] from_str = from_str", "timedelta object Supported Formats: '+-<num><unit>' where unit = s for seconds h for", "Formats: '+-<num><unit>' where unit = s for seconds h for hours M for", "dateutil.parser - None, '', 0, '0' and 'now' -> datetime.now() - if fmt", "use_ctime=False, use_mtime=False, tz=None): \"\"\" extracts date time from an fname examples of supported", "# not implemented so far # and rounding (use by strptime) = #", "= { 's': (0, 1), 'M': (0, 60), 'h': (0, 3600), 'd': (1,", "date time from an fname examples of supported formats: \"fnameYYYYMMDD\" just a date", "fmt is passed same as datetime.strptime :param datestr: date string to be passed", "if isinstance(datestr, (int, float)): datestr = 
str(datestr) datestr = datestr.strip() if datestr else", "'0' and 'now' -> datetime.now() - if fmt is passed same as datetime.strptime", "tz=None): \"\"\" extracts date time from an fname examples of supported formats: \"fnameYYYYMMDD\"", "date time object Supported Formats: - formats as supported by dateutil.parser - None,", "days } @classmethod def strptimedelta(cls, deltastr, info=None, raise_on_error=True): \"\"\" parses a date time", "default_to='now'): \"\"\" parses a time range string a time range string is a", "ctime :param use_mtime: if file name contains no string use file's mtime \"\"\"", "Supported Formats: '+-<num><unit>' where unit = s for seconds h for hours M", "} @classmethod def strptimedelta(cls, deltastr, info=None, raise_on_error=True): \"\"\" parses a date time string", "time zone \"\"\" # NOT IMPLEMENTED SO FAR # - delta format with", "which time zone shall be used. DEFAULT: use local time zone None: return", "SO FAR # - delta format with +-num units[rounding], # where unit =", "# where unit = # s for seconds # M for minutes #", "in fields: match = delta_rex.match(field) if not match: raise DateTimeError(\"can't parse %r as", "use datetime's normal strptime BUT add a time zone info :param tzinfo: if", "'now'): return datetime.now(tzinfo) if datestr[:1] in \"+-\" or ',' in datestr: return cls.strptimedelta(datestr,", ":param datestr: date string to be passed :param fmt: if passedm then use", "implemented so far # and rounding (use by strptime) = # d for", "\"\"\" def to_timestamp(t): \"\"\" convert a datetime object to seconds since epoch \"\"\"", "pytz import tzlocal # datetime objct for beginning of epoch T_EPOCH = datetime(1970,", "strptime) = # d for days # default no rounding # \"\"\" #", "for seconds h for hours M for minutes d for days w for", "datetime from datetime import timedelta import dateutil.parser import pytz import tzlocal # datetime", "class DateTimeError(Exception): \"\"\" custom exception \"\"\" class DateTime(object): 
single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta =", "cls.strptimedelta(datestr, tzinfo) rslt = dateutil.parser.parse(datestr) if rslt.tzinfo is None and tzinfo: rslt =", "# default no rounding tzinfo = tzinfo if tzinfo is not DEFAULT else", "timedelta(days * value, seconds * value) return rslt @classmethod def strptime(cls, datestr=None, fmt=None,", "as delta\" % field) value, unit = match.groups() value = float(value) days, seconds", "not match: raise DateTimeError(\"can't parse %r as delta\" % field) value, unit =", "passedm then use datetime's normal strptime BUT add a time zone info :param", "cls.delta_units[unit] rslt += timedelta(days * value, seconds * value) return rslt @classmethod def", "zone object other: use other time zone \"\"\" # NOT IMPLEMENTED SO FAR", "= delta_rex.match(field) if not match: raise DateTimeError(\"can't parse %r as delta\" % field)", "default_to else: from_str, to_str = [v.strip() for v in rangestr.split(',', 1)] from_str =", "object() # singleton, for args with default values class DateTimeError(Exception): \"\"\" custom exception", "= # d for days # default no rounding # \"\"\" # TODO:", "object Supported Formats: '+-<num><unit>' where unit = s for seconds h for hours", "# d for days # default no rounding tzinfo = tzinfo if tzinfo", "where unit = s for seconds h for hours M for minutes d", "mtime \"\"\" def to_timestamp(t): \"\"\" convert a datetime object to seconds since epoch", "tzinfo = tzinfo if tzinfo is not DEFAULT else tzlocal.get_localzone() if fmt: rslt", "to_str else default_to t_from = cls.strptime(from_str) t_to = cls.strptime(to_str) return t_from, t_to class", "+= timedelta(days * value, seconds * value) return rslt @classmethod def strptime(cls, datestr=None,", "tzinfo if tzinfo is not DEFAULT else tzlocal.get_localzone() if fmt: rslt = datetime.strptime(datestr,", "zone None: return naive time zone object other: use other time zone \"\"\"", "minutes d for days w for weeks 
default = days \"\"\" # not", "is not DEFAULT else tzlocal.get_localzone() if fmt: rslt = datetime.strptime(datestr, fmt) else: if", "date and time \"fnameYYYYMMDD-HH-mm-ss\" date and time \"fnameYYYYMMDD-ssssssssss\" date and time(in seconds since", "a date time string and returns a datetime timedelta object Supported Formats: '+-<num><unit>'", "default unit = days } @classmethod def strptimedelta(cls, deltastr, info=None, raise_on_error=True): \"\"\" parses", "def strptime(cls, datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\" parses a date time string and returns", "time zone None: return naive time zone object other: use other time zone", "- delta format with +-num units[rounding], # where unit = # s for", "DateTimeError(\"can't parse %r as delta\" % field) value, unit = match.groups() value =", "use other time zone \"\"\" # NOT IMPLEMENTED SO FAR # - delta", "use file's ctime :param use_mtime: if file name contains no string use file's", "class Date(DateTime): @classmethod def strptime(cls, datestr): pass def fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None): \"\"\"", "default no rounding tzinfo = tzinfo if tzinfo is not DEFAULT else tzlocal.get_localzone()", "import dateutil.parser import pytz import tzlocal # datetime objct for beginning of epoch", "deltas # multi_delta = r'^%s(?:,%s)*$' % (single_delta, single_delta) delta_rex = re.compile('^' + single_delta", "tzinfo=DEFAULT): \"\"\" parses a date time string and returns a date time object", "Supported Formats: - formats as supported by dateutil.parser - None, '', 0, '0'", "a start time and a end time \"\"\" if rangestr is None: from_str", "string to be passed :param fmt: if passedm then use datetime's normal strptime", "rangestr=None, default_from='-1d', default_to='now'): \"\"\" parses a time range string a time range string", "range string is a comma separated list of a start time and a", "t_from = cls.strptime(from_str) t_to = cls.strptime(to_str) return t_from, t_to class Time(DateTime): 
@classmethod def", "other time zone \"\"\" # NOT IMPLEMENTED SO FAR # - delta format", "if tzinfo is not DEFAULT else tzlocal.get_localzone() if fmt: rslt = datetime.strptime(datestr, fmt)", "w for weeks # and rounding = # d for days # default", "val in deltastr.split(',')) delta_rex = cls.delta_rex for field in fields: match = delta_rex.match(field)", "tzinfo: if no tz info is specified in the string, then this param", "date with separators \"fnameYYYYMMDD_HHmmss\" date and time \"fnameYYYYMMDD-HHmmss\" date and time \"fnameYYYYMMDD-HH-mm-ss\" date", "rounding tzinfo = tzinfo if tzinfo is not DEFAULT else tzlocal.get_localzone() if fmt:", "- if fmt is passed same as datetime.strptime :param datestr: date string to", "formats as supported by dateutil.parser - None, '', 0, '0' and 'now' ->", "delta_rex.match(field) if not match: raise DateTimeError(\"can't parse %r as delta\" % field) value,", "days, seconds = cls.delta_units[unit] rslt += timedelta(days * value, seconds * value) return", "time range string is a comma separated list of a start time and", "= default_from to_str = default_to else: from_str, to_str = [v.strip() for v in", "{ 's': (0, 1), 'M': (0, 60), 'h': (0, 3600), 'd': (1, 0),", "value) return rslt @classmethod def strptime(cls, datestr=None, fmt=None, tzinfo=DEFAULT): \"\"\" parses a date", "re from datetime import datetime from datetime import timedelta import dateutil.parser import pytz", "\"\"\" class DateTime(object): single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to handle", "0), 'w': (7, 0), '': (1, 0), # default unit = days }", "r'^%s(?:,%s)*$' % (single_delta, single_delta) delta_rex = re.compile('^' + single_delta + '$') delta_units =", "in datestr: return cls.strptimedelta(datestr, tzinfo) rslt = dateutil.parser.parse(datestr) if rslt.tzinfo is None and", "= # s for seconds # M for minutes # h for hours", "datestr: date string to be passed 
:param fmt: if passedm then use datetime's", "def fname_to_time(fname, use_ctime=False, use_mtime=False, tz=None): \"\"\" extracts date time from an fname examples", "datestr.strip() if datestr else datestr if datestr in (None, '', '0', 'now'): return", "seconds = cls.delta_units[unit] rslt += timedelta(days * value, seconds * value) return rslt", "days # default no rounding # \"\"\" # TODO: think about using dateutil.parser.relativedelta", "# w for weeks # and rounding = # d for days #", "= datestr.strip() if datestr else datestr if datestr in (None, '', '0', 'now'):", "match: raise DateTimeError(\"can't parse %r as delta\" % field) value, unit = match.groups()", "to_str = default_to else: from_str, to_str = [v.strip() for v in rangestr.split(',', 1)]", "\"fnameYYYYMMDD-HH-mm-ss\" date and time \"fnameYYYYMMDD-ssssssssss\" date and time(in seconds since epoche) :param fname:", "dateutil.parser.parse(datestr) if rslt.tzinfo is None and tzinfo: rslt = tzinfo.localize(rslt) return rslt @classmethod", "file name contains no string use file's ctime :param use_mtime: if file name", "other: use other time zone \"\"\" # NOT IMPLEMENTED SO FAR # -", "d for days # default no rounding # \"\"\" # TODO: think about", "single_delta) delta_rex = re.compile('^' + single_delta + '$') delta_units = { 's': (0,", "from_str, to_str = [v.strip() for v in rangestr.split(',', 1)] from_str = from_str if", "from_str if from_str else default_from to_str = to_str if to_str else default_to t_from", "date and time \"fnameYYYYMMDD-HHmmss\" date and time \"fnameYYYYMMDD-HH-mm-ss\" date and time \"fnameYYYYMMDD-ssssssssss\" date", "unit = # s for seconds # M for minutes # h for", "a time range string a time range string is a comma separated list", "days # w for weeks # and rounding = # d for days", "fmt: if passedm then use datetime's normal strptime BUT add a time zone", "decides which time zone shall be used. 
DEFAULT: use local time zone None:", "in \"+-\" or ',' in datestr: return cls.strptimedelta(datestr, tzinfo) rslt = dateutil.parser.parse(datestr) if", "seconds since epoche) :param fname: file name to parse :param use_ctime: if file", "fname: file name to parse :param use_ctime: if file name contains no string", "BUT add a time zone info :param tzinfo: if no tz info is", "= cls.strptime(from_str) t_to = cls.strptime(to_str) return t_from, t_to class Time(DateTime): @classmethod def strptime(cls,", "as supported by dateutil.parser - None, '', 0, '0' and 'now' -> datetime.now()", "import pytz import tzlocal # datetime objct for beginning of epoch T_EPOCH =", "think about using dateutil.parser.relativedelta rslt = datetime.now(pytz.utc) fields = (val.strip() for val in", "delta_rex = cls.delta_rex for field in fields: match = delta_rex.match(field) if not match:", "string and returns a datetime timedelta object Supported Formats: '+-<num><unit>' where unit =", "def to_timestamp(t): \"\"\" convert a datetime object to seconds since epoch \"\"\" return", "value = float(value) days, seconds = cls.delta_units[unit] rslt += timedelta(days * value, seconds", "if datestr[:1] in \"+-\" or ',' in datestr: return cls.strptimedelta(datestr, tzinfo) rslt =", "time object Supported Formats: - formats as supported by dateutil.parser - None, '',", "% field) value, unit = match.groups() value = float(value) days, seconds = cls.delta_units[unit]", "if from_str else default_from to_str = to_str if to_str else default_to t_from =", "# datetime objct for beginning of epoch T_EPOCH = datetime(1970, 1, 1, tzinfo=pytz.utc)", "= days } @classmethod def strptimedelta(cls, deltastr, info=None, raise_on_error=True): \"\"\" parses a date", "# default unit = days } @classmethod def strptimedelta(cls, deltastr, info=None, raise_on_error=True): \"\"\"", "for hours # d for days # w for weeks # and rounding", "cls.strptime(from_str) t_to = cls.strptime(to_str) return t_from, t_to class 
Time(DateTime): @classmethod def strptime(cls, datestr):", "a date time string and returns a date time object Supported Formats: -", "return datetime.now(tzinfo) if datestr[:1] in \"+-\" or ',' in datestr: return cls.strptimedelta(datestr, tzinfo)", "datetime.strptime(datestr, fmt) else: if isinstance(datestr, (int, float)): datestr = str(datestr) datestr = datestr.strip()", "is specified in the string, then this param decides which time zone shall", "info :param tzinfo: if no tz info is specified in the string, then", "= r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)(?:\\s*([shMdw])?)\\s*)' single_delta = r'(?:\\s*([+-]\\d+(?:\\.\\d*)?)\\s*([shMdw]?)\\s*)' # attempt to handle comma separated list of", "zone shall be used. DEFAULT: use local time zone None: return naive time", "else tzlocal.get_localzone() if fmt: rslt = datetime.strptime(datestr, fmt) else: if isinstance(datestr, (int, float)):", "no string use file's ctime :param use_mtime: if file name contains no string", "param decides which time zone shall be used. DEFAULT: use local time zone", "add a time zone info :param tzinfo: if no tz info is specified", "'$') delta_units = { 's': (0, 1), 'M': (0, 60), 'h': (0, 3600)," ]
[ "u = u + d dv = 1/tau * (k * v**2 +", "return tested def gen_fitness(target): '''Generate a fitness function interactively''' while True: method =", "p = 2 dist = 0 for i in xrange(m): dist += abs(spikes1[i]", "k//2] return spikes def dist_spike_time(train1, train2): '''Compute distance between two spike trains using", "def dist_waveform(train1, train2): '''Compute distance between two spike trains using the waveform distance", "1: print \"Error: No filename given\" sys.exit() target_file = open(sys.argv[1]) target_spiketrain = [float(num)", "according to the Izhikevich model''' tau = 10 thresh = 35 steps =", "= 5 spikes = [] for i in xrange(len(spike_train) - k + 1):", "worker in enumerate(workers): population[indices[i]].ptype = worker.get() pool.close() pool.join() return developed def visualize(generation_list, target):", "continue indices += [i] workers += [pool.apply_async(spiketrain_list, [ind.gtype])] for i, worker in enumerate(workers):", "enumerate(population): if ind.fitness == None: indices += [i] workers += [pool.apply_async(dist, [ind.ptype, target])]", "t\") pylab.ylabel(\"Activation level - v\") pylab.legend(loc=\"upper right\") pylab.savefig(\"izzy_spiketrains.png\") if __name__ == '__main__': if", "max number of generations:\\n\")) fitness_goal = float(raw_input(\"Input fitness goal, 0 for none:\\n\")) initial", "target])] for i, worker in enumerate(workers): distance = worker.get() population[indices[i]].fitness = 1 /", "compute the corresponding spike train''' return spiketrain(params[0], params[1], params[2], params[3], params[4]) def detect_spikes(spike_train):", "distance = dist(ind.ptype, target) if distance != 0: ind.fitness = 1 / distance", "parent_selection from ea import reproduction from ea import main from ea.ea_globals import *", "list and compute the corresponding spike train''' return spiketrain(params[0], params[1], params[2], params[3], params[4])", "label='Best solution') pylab.plot(range(len(target)), target, color='blue', 
def detect_spikes(spike_train, thresh=0, k=5):
    '''Detect spikes in a spike train using a sliding window of size k.

    A sample counts as a spike when it sits at the centre of the window,
    is the maximum of that window, and exceeds thresh.

    :param spike_train: sequence of activation levels
    :param thresh: minimum activation level for a spike (default 0,
        the value previously hard-coded)
    :param k: sliding window width (default 5, previously hard-coded)
    :return: list of spike indices (window-centre positions)
    '''
    spikes = []
    # Slide the window over every position where it fits completely;
    # trains shorter than k yield no windows and hence no spikes.
    for i in range(len(spike_train) - k + 1):
        window = spike_train[i:i+k]
        centre = window[k//2]
        # Centre sample must dominate its window and clear the threshold.
        if centre == max(window) and centre > thresh:
            spikes.append(i + k//2)
    return spikes
def dist_spike_time(train1, train2):
    '''Compute distance between two spike trains using the spike time distance metric.

    Compares the i-th spike times of both trains with a p=2 norm and adds
    a penalty proportional to the difference in spike counts (scaled by
    the train length and normalised by the shared spike count).

    :param train1: first spike train (sequence of activation levels)
    :param train2: second spike train
    :return: non-negative distance; 0.0 when neither train contains spikes
    '''
    spikes1 = detect_spikes(train1)
    spikes2 = detect_spikes(train2)
    m = min(len(spikes1), len(spikes2))
    n = max(len(spikes1), len(spikes2))
    # Fix: no spikes in either train previously raised ZeroDivisionError
    # at the final (1/n) normalisation; such trains are indistinguishable
    # under this metric, so their distance is zero.
    if n == 0:
        return 0.0
    p = 2
    dist = 0
    for i in range(m):
        dist += abs(spikes1[i] - spikes2[i])**p
    dist = dist ** (1/p)
    # Penalise differing spike counts, scaled by the train length.
    penalty = (n-m)*len(train1)
    penalty = penalty / max(2*m, 1)
    dist = (1/n) * (dist + penalty)
    return dist
def dist_waveform(train1, train2):
    '''Compute distance between two spike trains using the waveform distance metric.

    Point-wise p=2 comparison of the raw activation values:
    (1/m) * (sum |t1[i] - t2[i]|**p) ** (1/p) with m = len(train1).
    Assumes both trains have the same length — TODO confirm with callers.

    :param train1: first spike train (sequence of activation levels)
    :param train2: second spike train, same length as train1
    :return: non-negative distance; 0.0 for empty trains
    '''
    m = len(train1)
    # Fix: an empty train previously raised ZeroDivisionError at 1/m.
    if m == 0:
        return 0.0
    p = 2
    # NOTE: ** binds tighter than *, so this is (1/m) * root(sum), not the
    # p-norm mean root((1/m) * sum).  Kept as-is: for fixed-length trains
    # it only rescales distances and preserves the fitness ordering.
    dist = 1/m * sum(abs(train1[i] - train2[i]) ** p for i in range(m)) ** (1/p)
    return dist
def fitness_test(population, target, dist):
    '''Assign fitness = 1/distance to every individual that lacks a score.

    Individuals whose phenotype matches the target exactly (distance 0)
    receive infinite fitness.  Individuals that already carry a fitness
    value are left untouched, so cached scores survive across generations.

    :param population: individuals with .ptype and .fitness attributes
    :param target: target spike train handed to dist
    :param dist: two-argument distance function dist(ptype, target)
    :return: the same population object, fully scored
    '''
    for member in population:
        if member.fitness != None:
            # Already evaluated in an earlier generation; keep the score.
            continue
        d = dist(member.ptype, target)
        member.fitness = 1 / d if d != 0 else float('Inf')
    return population
def fitness_test_mp(population, target, dist):
    '''Compute fitnesses based on distance to the target spike train (parallel).

    Unscored individuals are evaluated in a multiprocessing pool and
    assigned fitness = 1 / (1 + distance).  NOTE(review): this differs
    from fitness_test, which uses 1 / distance — confirm which scaling
    the experiments rely on before unifying.

    :param population: individuals with .ptype and .fitness attributes
    :param target: target spike train passed through to dist
    :param dist: picklable two-argument distance function
    :return: the same population object, fully scored
    '''
    pool = mp.Pool(mp.cpu_count())
    try:
        indices = []
        workers = []
        for i, ind in enumerate(population):
            if ind.fitness is None:
                indices.append(i)
                workers.append(pool.apply_async(dist, [ind.ptype, target]))
        for i, worker in enumerate(workers):
            distance = worker.get()
            population[indices[i]].fitness = 1 / (1 + distance)
    finally:
        # Fix: always release the worker processes, even if a worker
        # raised — previously a failing dist() leaked the whole pool.
        pool.close()
        pool.join()
    return population
dist):", "'waveform': return (lambda population: fitness_test_mp(population, target, dist_waveform)) else: print \"Unrecognized method: \" +", "import reproduction from ea import main from ea.ea_globals import * import pylab import", "sum(abs(train1[i] - train2[i]) ** p for i in xrange(m)) ** (1/p) return dist", "in xrange(popsize)] generation_list = main.evolutionary_algorithm(initial, develop_mp, fitness_tester, adult_selector, parent_selector, reproducer, generations, fitness_goal) visualize(generation_list,", "[] average_plus_stddev = [] average_minus_stddev = [] for pop in generation_list: best +=", "trains using the waveform distance metric''' m = len(train1) p = 2 dist", "distance != 0: ind.fitness = 1 / distance else: ind.fitness = float('Inf') return", "average = [] stddev = [] average_plus_stddev = [] average_minus_stddev = [] for", "= 1 / distance else: ind.fitness = float('Inf') return tested def fitness_test_mp(population, target,", "import pylab import sys import copy import multiprocessing as mp def spiketrain(a, b,", "worker in enumerate(workers): distance = worker.get() population[indices[i]].fitness = 1 / (1 + distance)", "and compute the corresponding spike train''' return spiketrain(params[0], params[1], params[2], params[3], params[4]) def", "trains using the spike time distance metric''' spikes1 = detect_spikes(train1) spikes2 = detect_spikes(train2)", "label='Average with std.dev.') pylab.title(\"Fitness plot - Izzy\") pylab.xlabel(\"Generation\") pylab.ylabel(\"Fitness\") pylab.legend(loc=\"upper left\") pylab.savefig(\"izzy_fitness.png\") best_index", "if ind.fitness == None: indices += [i] workers += [pool.apply_async(dist, [ind.ptype, target])] for", "[ind.ptype, target])] for i, worker in enumerate(workers): distance = worker.get() population[indices[i]].fitness = 1", "target) if distance != 0: ind.fitness = 1 / distance else: ind.fitness =", "2 dist = 0 for i in xrange(m): dist += abs(spikes1[i] - spikes2[i])**p", "import 
def develop(population):
    '''Development function: generates a spike train for each individual.

    Individuals that already carry a phenotype are left untouched; the
    rest get their genotype decoded into an Izhikevich spike train.
    Returns the same population object.
    '''
    for member in population:
        if member.ptype == None:
            member.ptype = spiketrain_list(member.gtype)
    return population
"tau = 10 thresh = 35 steps = 1000 ext_input = [10 for", "k''' thresh = 0 k = 5 spikes = [] for i in", "fitness_test_mp(population, target, dist): '''Compute fitnesses based on distance to the target spike train'''", "plot\") pylab.xlabel(\"Time - t\") pylab.ylabel(\"Activation level - v\") pylab.legend(loc=\"upper right\") pylab.savefig(\"izzy_spiketrains.png\") if __name__", "ext_input[i]) du = a/tau * (b*v - u) v += dv u +=", "developed = population workers = [] indices = [] pool = mp.Pool(mp.cpu_count()) for", "= reproduction.gen_reproduction(mutate, crossover) generations = int(raw_input(\"Input max number of generations:\\n\")) fitness_goal = float(raw_input(\"Input", "= penalty / max(2*m, 1) dist = (1/n) * (dist + penalty) return", "print best_individual.fitness pylab.figure(2) pylab.plot(range(len(best_spiketrain)), best_spiketrain, color='r', label='Best solution') pylab.plot(range(len(target)), target, color='blue', label='Target') pylab.title(\"Spiketrain", "[] for i in xrange(steps): train += [v] if v >= thresh: v", "for ind in tested: if ind.fitness != None: continue distance = dist(ind.ptype, target)", "using a sliding window of size k''' thresh = 0 k = 5", "developed: if ind.ptype != None: continue ind.ptype = spiketrain_list(ind.gtype) return developed def develop_mp(population):", "of size k''' thresh = 0 k = 5 spikes = [] for", "indices = [] workers = [] for i, ind in enumerate(population): if ind.fitness", "stddev[-1]] average_minus_stddev += [average[-1] - stddev[-1]] pylab.figure(1) pylab.fill_between(range(len(generation_list)), average_plus_stddev, average_minus_stddev, alpha=0.2, color='b', label=\"Standard", "= detect_spikes(train2) n = max(len(spikes1), len(spikes2)) m = min(len(spikes1), len(spikes2)) p = 2", "[] indices = [] pool = mp.Pool(mp.cpu_count()) for i, ind in enumerate(developed): if", "filename given\" sys.exit() target_file = open(sys.argv[1]) target_spiketrain = [float(num) for num in target_file.read().split()]", "average 
+= [avg_fitness(pop)] stddev += [fitness_stddev(pop)] average_plus_stddev += [average[-1] + stddev[-1]] average_minus_stddev +=", "spike_train[i:i+k] if window[k//2] == max(window) and window[k//2] > thresh: spikes += [i +", "the Izhikevich model''' tau = 10 thresh = 35 steps = 1000 ext_input", "= gen_fitness(target_spiketrain) adult_selector, litter_size = adult_selection.gen_adult_selection(popsize) parent_selector = parent_selection.gen_parent_selection(litter_size) mutate = float_gtype.gen_mutate(ranges) crossover", "return dist def dist_spike_interval(train1, train2): '''Compute distance between two spike trains using the", "if method == 'time': return (lambda population: fitness_test_mp(population, target, dist_spike_time)) elif method ==", "train = [] for i in xrange(steps): train += [v] if v >=", "[avg_fitness(pop)] stddev += [fitness_stddev(pop)] average_plus_stddev += [average[-1] + stddev[-1]] average_minus_stddev += [average[-1] -", "popsize = int(raw_input(\"Input population size:\\n\")) fitness_tester = gen_fitness(target_spiketrain) adult_selector, litter_size = adult_selection.gen_adult_selection(popsize) parent_selector", "pictures using pylab''' best = [] average = [] stddev = [] average_plus_stddev", "raw_input(\"Input distance metric (time/interval/waveform):\\n\") if method == 'time': return (lambda population: fitness_test_mp(population, target,", "ea.ea_globals import * import pylab import sys import copy import multiprocessing as mp", "from ea import float_gtype from ea import adult_selection from ea import parent_selection from", "spiketrain_list(params): '''Take a, b, c, d and k as a list and compute", "+ penalty) return dist def dist_spike_interval(train1, train2): '''Compute distance between two spike trains", "'''Development function, generates spike train for each individual''' developed = population for ind", "label=\"Standard deviation\") pylab.plot(range(len(generation_list)), best, color='r', label='Best') 
pylab.plot(range(len(generation_list)), average, color='b', label='Average with std.dev.') pylab.title(\"Fitness", "+= [most_fit(pop).fitness] average += [avg_fitness(pop)] stddev += [fitness_stddev(pop)] average_plus_stddev += [average[-1] + stddev[-1]]", "in enumerate(workers): distance = worker.get() population[indices[i]].fitness = 1 / (1 + distance) pool.close()", "and window[k//2] > thresh: spikes += [i + k//2] return spikes def dist_spike_time(train1,", "tested def fitness_test_mp(population, target, dist): '''Compute fitnesses based on distance to the target", "age=0) for i in xrange(popsize)] generation_list = main.evolutionary_algorithm(initial, develop_mp, fitness_tester, adult_selector, parent_selector, reproducer,", "dist_waveform(train1, train2): '''Compute distance between two spike trains using the waveform distance metric'''", "initial = [individual(gtype=float_gtype.generate(ranges), age=0) for i in xrange(popsize)] generation_list = main.evolutionary_algorithm(initial, develop_mp, fitness_tester,", "import main from ea.ea_globals import * import pylab import sys import copy import", "pylab.figure(2) pylab.plot(range(len(best_spiketrain)), best_spiketrain, color='r', label='Best solution') pylab.plot(range(len(target)), target, color='blue', label='Target') pylab.title(\"Spiketrain plot\") pylab.xlabel(\"Time", "visualize(generation_list, target): '''Generate pretty pictures using pylab''' best = [] average = []", "color='blue', label='Target') pylab.title(\"Spiketrain plot\") pylab.xlabel(\"Time - t\") pylab.ylabel(\"Activation level - v\") pylab.legend(loc=\"upper right\")", "color='r', label='Best solution') pylab.plot(range(len(target)), target, color='blue', label='Target') pylab.title(\"Spiketrain plot\") pylab.xlabel(\"Time - t\") pylab.ylabel(\"Activation", "sys import copy import multiprocessing as mp def spiketrain(a, b, c, d, k,):", "'''Compute distance between two spike trains using the waveform distance metric''' m =", 
"pylab.savefig(\"izzy_spiketrains.png\") if __name__ == '__main__': if len(sys.argv) == 1: print \"Error: No filename", "fitness_tester = gen_fitness(target_spiketrain) adult_selector, litter_size = adult_selection.gen_adult_selection(popsize) parent_selector = parent_selection.gen_parent_selection(litter_size) mutate = float_gtype.gen_mutate(ranges)", "(1/p) return dist def fitness_test(population, target, dist): '''Compute fitnesses based on distance to", "None: continue ind.ptype = spiketrain_list(ind.gtype) return developed def develop_mp(population): '''Development function that makes", "[] pool = mp.Pool(mp.cpu_count()) for i, ind in enumerate(developed): if ind.ptype != None:", "[float(num) for num in target_file.read().split()] ranges = [(0.001, 0.2), (0.01, 0.3), (-80.0, -30.0),", "for i, ind in enumerate(population): if ind.fitness == None: indices += [i] workers", "+ ext_input[i]) du = a/tau * (b*v - u) v += dv u", "= 2 dist = 1/m * sum(abs(train1[i] - train2[i]) ** p for i", "pylab.plot(range(len(generation_list)), average, color='b', label='Average with std.dev.') pylab.title(\"Fitness plot - Izzy\") pylab.xlabel(\"Generation\") pylab.ylabel(\"Fitness\") pylab.legend(loc=\"upper", "crossover = float_gtype.gen_crossover() reproducer = reproduction.gen_reproduction(mutate, crossover) generations = int(raw_input(\"Input max number of", "tested = population for ind in tested: if ind.fitness != None: continue distance", "dist = sum(abs((spikes1[i] - spikes1[i-1])-(spikes2[i] - spikes2[i-1]))**p for i in xrange(1,m)) ** (1/p)", "int(raw_input(\"Input population size:\\n\")) fitness_tester = gen_fitness(target_spiketrain) adult_selector, litter_size = adult_selection.gen_adult_selection(popsize) parent_selector = parent_selection.gen_parent_selection(litter_size)", "m = min(len(spikes1), len(spikes2)) n = max(len(spikes1), len(spikes2)) p = 2 dist =", "+= [i + k//2] return spikes def dist_spike_time(train1, train2): '''Compute distance between two", "size 
k''' thresh = 0 k = 5 spikes = [] for i", "10.0), (0.01, 1.0)] popsize = int(raw_input(\"Input population size:\\n\")) fitness_tester = gen_fitness(target_spiketrain) adult_selector, litter_size", "distance metric''' spikes1 = detect_spikes(train1) spikes2 = detect_spikes(train2) n = max(len(spikes1), len(spikes2)) m", "p = 2 dist = sum(abs((spikes1[i] - spikes1[i-1])-(spikes2[i] - spikes2[i-1]))**p for i in", "= float('Inf') return tested def fitness_test_mp(population, target, dist): '''Compute fitnesses based on distance", "interactively''' while True: method = raw_input(\"Input distance metric (time/interval/waveform):\\n\") if method == 'time':", "dist_spike_interval)) elif method == 'waveform': return (lambda population: fitness_test_mp(population, target, dist_waveform)) else: print", "min(len(spikes1), len(spikes2)) p = 2 dist = sum(abs((spikes1[i] - spikes1[i-1])-(spikes2[i] - spikes2[i-1]))**p for", "on distance to the target spike train''' tested = population for ind in", "corresponding spike train''' return spiketrain(params[0], params[1], params[2], params[3], params[4]) def detect_spikes(spike_train): '''Detect spikes", "the target spike train''' tested = population for ind in tested: if ind.fitness", "pylab.ylabel(\"Fitness\") pylab.legend(loc=\"upper left\") pylab.savefig(\"izzy_fitness.png\") best_index = best.index(max(best)) best_individual = most_fit(generation_list[best_index]) best_spiketrain = best_individual.ptype", "develop(population): '''Development function, generates spike train for each individual''' developed = population for", "- u + ext_input[i]) du = a/tau * (b*v - u) v +=", "reproduction.gen_reproduction(mutate, crossover) generations = int(raw_input(\"Input max number of generations:\\n\")) fitness_goal = float(raw_input(\"Input fitness", "time distance metric''' spikes1 = detect_spikes(train1) spikes2 = detect_spikes(train2) m = min(len(spikes1), len(spikes2))", "in developed: if ind.ptype != None: continue ind.ptype = 
spiketrain_list(ind.gtype) return developed def", "metric''' spikes1 = detect_spikes(train1) spikes2 = detect_spikes(train2) n = max(len(spikes1), len(spikes2)) m =", "ind.ptype != None: continue indices += [i] workers += [pool.apply_async(spiketrain_list, [ind.gtype])] for i,", "population[indices[i]].ptype = worker.get() pool.close() pool.join() return developed def visualize(generation_list, target): '''Generate pretty pictures", "ea import reproduction from ea import main from ea.ea_globals import * import pylab", "individual''' developed = population for ind in developed: if ind.ptype != None: continue", "\" + method def develop(population): '''Development function, generates spike train for each individual'''", "m = min(len(spikes1), len(spikes2)) p = 2 dist = sum(abs((spikes1[i] - spikes1[i-1])-(spikes2[i] -", "makes use of multiprocessing''' developed = population workers = [] indices = []", "for i, ind in enumerate(developed): if ind.ptype != None: continue indices += [i]", "pylab''' best = [] average = [] stddev = [] average_plus_stddev = []", "(b*v - u) v += dv u += du return train def spiketrain_list(params):", "multiprocessing''' developed = population workers = [] indices = [] pool = mp.Pool(mp.cpu_count())", "float(raw_input(\"Input fitness goal, 0 for none:\\n\")) initial = [individual(gtype=float_gtype.generate(ranges), age=0) for i in", "= 0 for i in xrange(m): dist += abs(spikes1[i] - spikes2[i])**p dist =", "ranges = [(0.001, 0.2), (0.01, 0.3), (-80.0, -30.0), (0.1, 10.0), (0.01, 1.0)] popsize", "(0.1, 10.0), (0.01, 1.0)] popsize = int(raw_input(\"Input population size:\\n\")) fitness_tester = gen_fitness(target_spiketrain) adult_selector,", "target, dist): '''Compute fitnesses based on distance to the target spike train''' tested", "if len(sys.argv) == 1: print \"Error: No filename given\" sys.exit() target_file = open(sys.argv[1])", "k,): '''Compute a spike train according to the Izhikevich model''' tau = 10", "pylab import sys import copy 
def spiketrain(a, b, c, d, k):
    '''Compute a spike train according to the Izhikevich model.

    Integrates the two-variable Izhikevich neuron for a fixed 1000 steps
    under a constant external input of 10, recording the membrane
    potential v at every step.  When v crosses the spike threshold it is
    reset to c and the recovery variable u is bumped by d.
    Returns the list of 1000 recorded v values.
    '''
    tau = 10
    thresh = 35
    steps = 1000
    ext_input = [10] * steps
    v, u = -60, 0
    train = []
    for t in range(steps):
        train.append(v)
        # Spike: reset membrane potential, kick the recovery variable.
        if v >= thresh:
            v, u = c, u + d
        # Euler step; both derivatives use the (possibly reset) current state.
        dv = 1/tau * (k * v**2 + 5*v + 140 - u + ext_input[t])
        du = a/tau * (b*v - u)
        v, u = v + dv, u + du
    return train
def spiketrain_list(params):
    '''Take a, b, c, d and k as a list and compute the corresponding spike train.

    Thin adapter so a genotype (a flat sequence of the five Izhikevich
    parameters) can be handed straight to spiketrain.
    '''
    args = [params[i] for i in range(5)]
    return spiketrain(*args)
def dist_spike_time(train1, train2):
    '''Compute distance between two spike trains using the spike time distance metric.

    Pairs up detected spikes in order and takes the p-norm (p = 2) of the
    differences of their positions; trains with unequal spike counts pay a
    penalty proportional to the count mismatch.  The result is normalised
    by the larger spike count n.

    Fix: when neither train contains any spikes (n == 0) the original
    divided by zero; two spike-free trains are now reported at distance 0.
    '''
    spikes1 = detect_spikes(train1)
    spikes2 = detect_spikes(train2)
    m = min(len(spikes1), len(spikes2))
    n = max(len(spikes1), len(spikes2))
    if n == 0:
        # No spikes anywhere: indistinguishable under this metric
        # (previously raised ZeroDivisionError on 1/n).
        return 0.0
    p = 2
    dist = 0
    for s1, s2 in zip(spikes1, spikes2):  # zip truncates to the first m pairs
        dist += abs(s1 - s2)**p
    dist = dist ** (1/p)
    # Penalise unmatched spikes; max(..., 1) guards the m == 0 case.
    penalty = (n - m) * len(train1) / max(2*m, 1)
    return (1/n) * (dist + penalty)
def develop_mp(population):
    '''Development function that makes use of multiprocessing.

    Same contract as develop(), but the spike trains of individuals lacking
    a phenotype are computed in parallel across one worker process per CPU.
    Mutates the individuals in place and returns the same population.
    '''
    pool = mp.Pool(mp.cpu_count())
    pending = []
    # Fan out: one async job per individual still missing a phenotype.
    for idx, ind in enumerate(population):
        if ind.ptype is None:
            pending.append((idx, pool.apply_async(spiketrain_list, [ind.gtype])))
    # Fan in: collect results back into the matching individuals.
    for idx, job in pending:
        population[idx].ptype = job.get()
    pool.close()
    pool.join()
    return population
if __name__ == '__main__':
    if len(sys.argv) == 1:
        # No target file on the command line: nothing to evolve towards.
        print("Error: No filename given")
        sys.exit()
    # Read the target spike train (whitespace-separated floats).
    # Fix: the original opened the file and never closed it; 'with'
    # guarantees the handle is released even if parsing fails.
    with open(sys.argv[1]) as target_file:
        target_spiketrain = [float(num) for num in target_file.read().split()]
    # Gene ranges for the five Izhikevich parameters a, b, c, d, k.
    ranges = [(0.001, 0.2), (0.01, 0.3), (-80.0, -30.0), (0.1, 10.0), (0.01, 1.0)]
    popsize = int(raw_input("Input population size:\n"))
    fitness_tester = gen_fitness(target_spiketrain)
    adult_selector, litter_size = adult_selection.gen_adult_selection(popsize)
    parent_selector = parent_selection.gen_parent_selection(litter_size)
    mutate = float_gtype.gen_mutate(ranges)
    crossover = float_gtype.gen_crossover()
    reproducer = reproduction.gen_reproduction(mutate, crossover)
    generations = int(raw_input("Input max number of generations:\n"))
    fitness_goal = float(raw_input("Input fitness goal, 0 for none:\n"))
    initial = [individual(gtype=float_gtype.generate(ranges), age=0) for i in range(popsize)]
    generation_list = main.evolutionary_algorithm(initial, develop_mp, fitness_tester, adult_selector, parent_selector, reproducer, generations, fitness_goal)
    visualize(generation_list, target_spiketrain)
def dist_spike_interval(train1, train2):
    '''Compute distance between two spike trains using the spike interval distance metric.

    Compares the gaps between consecutive spikes rather than their absolute
    positions, via a p-norm (p = 2) over the gap differences, plus a penalty
    for mismatched spike counts.  All divisors are clamped to at least 1,
    so spike-free trains are handled without error.
    '''
    spikes1 = detect_spikes(train1)
    spikes2 = detect_spikes(train2)
    n = max(len(spikes1), len(spikes2))
    m = min(len(spikes1), len(spikes2))
    p = 2
    total = 0
    for i in range(1, m):
        gap1 = spikes1[i] - spikes1[i-1]
        gap2 = spikes2[i] - spikes2[i-1]
        total += abs(gap1 - gap2)**p
    dist = total ** (1/p)
    # Penalise unmatched spikes; max(..., 1) guards the m == 0 case.
    penalty = (n - m) * len(train1) / max(2*m, 1)
    # There are m-1 gaps; max(..., 1) guards m <= 1.
    return 1/max(m-1, 1) * (dist + penalty)
def dist_waveform(train1, train2):
    '''Compute distance between two spike trains using the waveform distance metric.

    Takes the p-norm (p = 2) of the pointwise differences over the length
    of train1, scaled by 1/m.  Assumes train2 is at least as long as
    train1 (in this script both are always 1000 samples).

    Fix: an empty train1 (m == 0) made the original divide by zero; an
    empty train is now reported at distance 0.
    '''
    m = len(train1)
    if m == 0:
        # Nothing to compare; previously raised ZeroDivisionError on 1/m.
        return 0.0
    p = 2
    dist = 1/m * sum(abs(train1[i] - train2[i]) ** p for i in range(m)) ** (1/p)
    return dist
def fitness_test(population, target, dist):
    '''Compute fitnesses based on distance to the target spike train.

    Serial variant: each individual with an unset fitness gets 1/distance
    (infinite for a perfect, zero-distance match).  Individuals whose
    fitness is already set are skipped.  Mutates in place and returns the
    same population.  NOTE: fitness_test_mp uses the different scaling
    1/(1+distance) — the two are not interchangeable.
    '''
    for ind in population:
        if ind.fitness is not None:
            continue
        distance = dist(ind.ptype, target)
        ind.fitness = 1 / distance if distance != 0 else float('Inf')
    return population
def fitness_test_mp(population, target, dist):
    '''Compute fitnesses based on distance to the target spike train.

    Parallel variant: distance evaluations for individuals with an unset
    fitness are farmed out to a process pool (dist must therefore be
    picklable, i.e. a module-level function).  Fitness is 1/(1+distance),
    which never divides by zero.  NOTE: the serial fitness_test uses the
    different scaling 1/distance.  Mutates in place and returns the
    population.
    '''
    pool = mp.Pool(mp.cpu_count())
    jobs = []
    # Fan out one async distance computation per unevaluated individual.
    for idx, ind in enumerate(population):
        if ind.fitness is None:
            jobs.append((idx, pool.apply_async(dist, [ind.ptype, target])))
    # Fan in, writing fitness back onto the matching individuals.
    for idx, job in jobs:
        population[idx].fitness = 1 / (1 + job.get())
    pool.close()
    pool.join()
    return population
\"Unrecognized method: \" + method def develop(population): '''Development function, generates spike", "v = -60 u = 0 train = [] for i in xrange(steps):", "= 35 steps = 1000 ext_input = [10 for i in xrange(steps)] v", "window of size k''' thresh = 0 k = 5 spikes = []", "a list and compute the corresponding spike train''' return spiketrain(params[0], params[1], params[2], params[3],", "1) * (dist + penalty) return dist def dist_waveform(train1, train2): '''Compute distance between", "return spikes def dist_spike_time(train1, train2): '''Compute distance between two spike trains using the", "developed def visualize(generation_list, target): '''Generate pretty pictures using pylab''' best = [] average", "ea import parent_selection from ea import reproduction from ea import main from ea.ea_globals", "fitness goal, 0 for none:\\n\")) initial = [individual(gtype=float_gtype.generate(ranges), age=0) for i in xrange(popsize)]", "dist(ind.ptype, target) if distance != 0: ind.fitness = 1 / distance else: ind.fitness", "multiprocessing as mp def spiketrain(a, b, c, d, k,): '''Compute a spike train", "5*v + 140 - u + ext_input[i]) du = a/tau * (b*v -", "(0.01, 1.0)] popsize = int(raw_input(\"Input population size:\\n\")) fitness_tester = gen_fitness(target_spiketrain) adult_selector, litter_size =", "= max(len(spikes1), len(spikes2)) m = min(len(spikes1), len(spikes2)) p = 2 dist = sum(abs((spikes1[i]", "(1/p) penalty = (n - m) * len(train1) / max(2*m, 1) dist =", "= [] stddev = [] average_plus_stddev = [] average_minus_stddev = [] for pop", "= (n - m) * len(train1) / max(2*m, 1) dist = 1/max(m-1, 1)", "import sys import copy import multiprocessing as mp def spiketrain(a, b, c, d,", "sum(abs((spikes1[i] - spikes1[i-1])-(spikes2[i] - spikes2[i-1]))**p for i in xrange(1,m)) ** (1/p) penalty =", "for i in xrange(steps)] v = -60 u = 0 train = []", "to the Izhikevich model''' tau = 10 thresh = 35 steps = 1000", "best.index(max(best)) best_individual = 
most_fit(generation_list[best_index]) best_spiketrain = best_individual.ptype print best_individual.gtype print best_individual.fitness pylab.figure(2) pylab.plot(range(len(best_spiketrain)),", "ind in enumerate(developed): if ind.ptype != None: continue indices += [i] workers +=", "v >= thresh: v = c u = u + d dv =", "= 1/max(m-1, 1) * (dist + penalty) return dist def dist_waveform(train1, train2): '''Compute", "(lambda population: fitness_test_mp(population, target, dist_waveform)) else: print \"Unrecognized method: \" + method def", "= float_gtype.gen_crossover() reproducer = reproduction.gen_reproduction(mutate, crossover) generations = int(raw_input(\"Input max number of generations:\\n\"))", "tested: if ind.fitness != None: continue distance = dist(ind.ptype, target) if distance !=", "in enumerate(workers): population[indices[i]].ptype = worker.get() pool.close() pool.join() return developed def visualize(generation_list, target): '''Generate", "train2): '''Compute distance between two spike trains using the waveform distance metric''' m", "gen_fitness(target): '''Generate a fitness function interactively''' while True: method = raw_input(\"Input distance metric", "for none:\\n\")) initial = [individual(gtype=float_gtype.generate(ranges), age=0) for i in xrange(popsize)] generation_list = main.evolutionary_algorithm(initial,", "on distance to the target spike train''' pool = mp.Pool(mp.cpu_count()) tested = population", "metric''' spikes1 = detect_spikes(train1) spikes2 = detect_spikes(train2) m = min(len(spikes1), len(spikes2)) n =", "thresh: v = c u = u + d dv = 1/tau *", "b, c, d and k as a list and compute the corresponding spike", "reproducer = reproduction.gen_reproduction(mutate, crossover) generations = int(raw_input(\"Input max number of generations:\\n\")) fitness_goal =", "i in xrange(popsize)] generation_list = main.evolutionary_algorithm(initial, develop_mp, fitness_tester, adult_selector, parent_selector, reproducer, generations, fitness_goal)", 
"+= [avg_fitness(pop)] stddev += [fitness_stddev(pop)] average_plus_stddev += [average[-1] + stddev[-1]] average_minus_stddev += [average[-1]", "[] for i, ind in enumerate(population): if ind.fitness == None: indices += [i]", "d and k as a list and compute the corresponding spike train''' return", "pool.join() return tested def gen_fitness(target): '''Generate a fitness function interactively''' while True: method", "fitness_test(population, target, dist): '''Compute fitnesses based on distance to the target spike train'''", "1 / distance else: ind.fitness = float('Inf') return tested def fitness_test_mp(population, target, dist):", "in xrange(len(spike_train) - k + 1): window = spike_train[i:i+k] if window[k//2] == max(window)", "= detect_spikes(train2) m = min(len(spikes1), len(spikes2)) n = max(len(spikes1), len(spikes2)) p = 2", "'time': return (lambda population: fitness_test_mp(population, target, dist_spike_time)) elif method == 'interval': return (lambda", "ind in enumerate(population): if ind.fitness == None: indices += [i] workers += [pool.apply_async(dist,", "worker.get() pool.close() pool.join() return developed def visualize(generation_list, target): '''Generate pretty pictures using pylab'''", "m) * len(train1) / max(2*m, 1) dist = 1/max(m-1, 1) * (dist +", "= [] average_plus_stddev = [] average_minus_stddev = [] for pop in generation_list: best", "the spike time distance metric''' spikes1 = detect_spikes(train1) spikes2 = detect_spikes(train2) m =", "- spikes2[i])**p dist = dist ** (1/p) penalty = (n-m)*len(train1) penalty = penalty", "tested = population indices = [] workers = [] for i, ind in", "as mp def spiketrain(a, b, c, d, k,): '''Compute a spike train according", "[v] if v >= thresh: v = c u = u + d", "ext_input = [10 for i in xrange(steps)] v = -60 u = 0", "ind in developed: if ind.ptype != None: continue ind.ptype = spiketrain_list(ind.gtype) return developed", "enumerate(workers): distance = worker.get() population[indices[i]].fitness = 1 
/ (1 + distance) pool.close() pool.join()", "fitness_test_mp(population, target, dist_waveform)) else: print \"Unrecognized method: \" + method def develop(population): '''Development", "= [] average = [] stddev = [] average_plus_stddev = [] average_minus_stddev =", "spikes1 = detect_spikes(train1) spikes2 = detect_spikes(train2) m = min(len(spikes1), len(spikes2)) n = max(len(spikes1),", "train''' tested = population for ind in tested: if ind.fitness != None: continue", "== 'interval': return (lambda population: fitness_test_mp(population, target, dist_spike_interval)) elif method == 'waveform': return", "spikes2 = detect_spikes(train2) m = min(len(spikes1), len(spikes2)) n = max(len(spikes1), len(spikes2)) p =", "0: ind.fitness = 1 / distance else: ind.fitness = float('Inf') return tested def", "len(sys.argv) == 1: print \"Error: No filename given\" sys.exit() target_file = open(sys.argv[1]) target_spiketrain", "distance metric (time/interval/waveform):\\n\") if method == 'time': return (lambda population: fitness_test_mp(population, target, dist_spike_time))", "= worker.get() pool.close() pool.join() return developed def visualize(generation_list, target): '''Generate pretty pictures using", "= 1/tau * (k * v**2 + 5*v + 140 - u +", "def dist_spike_time(train1, train2): '''Compute distance between two spike trains using the spike time", "return developed def visualize(generation_list, target): '''Generate pretty pictures using pylab''' best = []", "in target_file.read().split()] ranges = [(0.001, 0.2), (0.01, 0.3), (-80.0, -30.0), (0.1, 10.0), (0.01,", "(time/interval/waveform):\\n\") if method == 'time': return (lambda population: fitness_test_mp(population, target, dist_spike_time)) elif method", "adult_selection from ea import parent_selection from ea import reproduction from ea import main", "+ 5*v + 140 - u + ext_input[i]) du = a/tau * (b*v", "and k as a list and compute the corresponding spike train''' return spiketrain(params[0],", "spike train''' tested = 
population for ind in tested: if ind.fitness != None:", "pylab.xlabel(\"Generation\") pylab.ylabel(\"Fitness\") pylab.legend(loc=\"upper left\") pylab.savefig(\"izzy_fitness.png\") best_index = best.index(max(best)) best_individual = most_fit(generation_list[best_index]) best_spiketrain =", "best_individual.fitness pylab.figure(2) pylab.plot(range(len(best_spiketrain)), best_spiketrain, color='r', label='Best solution') pylab.plot(range(len(target)), target, color='blue', label='Target') pylab.title(\"Spiketrain plot\")", "open(sys.argv[1]) target_spiketrain = [float(num) for num in target_file.read().split()] ranges = [(0.001, 0.2), (0.01,", "reproduction from ea import main from ea.ea_globals import * import pylab import sys", "spike train using a sliding window of size k''' thresh = 0 k", "len(spikes2)) m = min(len(spikes1), len(spikes2)) p = 2 dist = sum(abs((spikes1[i] - spikes1[i-1])-(spikes2[i]", "spike trains using the spike time distance metric''' spikes1 = detect_spikes(train1) spikes2 =", "worker.get() population[indices[i]].fitness = 1 / (1 + distance) pool.close() pool.join() return tested def", "use of multiprocessing''' developed = population workers = [] indices = [] pool", "indices = [] pool = mp.Pool(mp.cpu_count()) for i, ind in enumerate(developed): if ind.ptype", "a, b, c, d and k as a list and compute the corresponding", "= adult_selection.gen_adult_selection(popsize) parent_selector = parent_selection.gen_parent_selection(litter_size) mutate = float_gtype.gen_mutate(ranges) crossover = float_gtype.gen_crossover() reproducer =", "ind.ptype = spiketrain_list(ind.gtype) return developed def develop_mp(population): '''Development function that makes use of", "using pylab''' best = [] average = [] stddev = [] average_plus_stddev =", "= float(raw_input(\"Input fitness goal, 0 for none:\\n\")) initial = [individual(gtype=float_gtype.generate(ranges), age=0) for i", "+ stddev[-1]] average_minus_stddev += [average[-1] - stddev[-1]] pylab.figure(1) 
pylab.fill_between(range(len(generation_list)), average_plus_stddev, average_minus_stddev, alpha=0.2, color='b',", "if __name__ == '__main__': if len(sys.argv) == 1: print \"Error: No filename given\"", "+ 140 - u + ext_input[i]) du = a/tau * (b*v - u)", "i, worker in enumerate(workers): distance = worker.get() population[indices[i]].fitness = 1 / (1 +", "0 k = 5 spikes = [] for i in xrange(len(spike_train) - k", "between two spike trains using the spike time distance metric''' spikes1 = detect_spikes(train1)", "xrange(len(spike_train) - k + 1): window = spike_train[i:i+k] if window[k//2] == max(window) and", "= open(sys.argv[1]) target_spiketrain = [float(num) for num in target_file.read().split()] ranges = [(0.001, 0.2),", "spikes def dist_spike_time(train1, train2): '''Compute distance between two spike trains using the spike", "- stddev[-1]] pylab.figure(1) pylab.fill_between(range(len(generation_list)), average_plus_stddev, average_minus_stddev, alpha=0.2, color='b', label=\"Standard deviation\") pylab.plot(range(len(generation_list)), best, color='r',", "spikes2[i-1]))**p for i in xrange(1,m)) ** (1/p) penalty = (n - m) *", "'__main__': if len(sys.argv) == 1: print \"Error: No filename given\" sys.exit() target_file =", "spikes in a spike train using a sliding window of size k''' thresh", "pool = mp.Pool(mp.cpu_count()) for i, ind in enumerate(developed): if ind.ptype != None: continue", "color='r', label='Best') pylab.plot(range(len(generation_list)), average, color='b', label='Average with std.dev.') pylab.title(\"Fitness plot - Izzy\") pylab.xlabel(\"Generation\")", "ind.fitness = 1 / distance else: ind.fitness = float('Inf') return tested def fitness_test_mp(population,", "a sliding window of size k''' thresh = 0 k = 5 spikes", "ea import adult_selection from ea import parent_selection from ea import reproduction from ea", "= min(len(spikes1), len(spikes2)) n = max(len(spikes1), len(spikes2)) p = 2 dist = 0", "i in xrange(steps)] v = -60 u = 0 train = [] 
for", "max(window) and window[k//2] > thresh: spikes += [i + k//2] return spikes def", "penalty = (n-m)*len(train1) penalty = penalty / max(2*m, 1) dist = (1/n) *", "target spike train''' tested = population for ind in tested: if ind.fitness !=", "[i] workers += [pool.apply_async(dist, [ind.ptype, target])] for i, worker in enumerate(workers): distance =", "each individual''' developed = population for ind in developed: if ind.ptype != None:", "workers += [pool.apply_async(dist, [ind.ptype, target])] for i, worker in enumerate(workers): distance = worker.get()", "k = 5 spikes = [] for i in xrange(len(spike_train) - k +", "pool.close() pool.join() return tested def gen_fitness(target): '''Generate a fitness function interactively''' while True:", "/ max(2*m, 1) dist = (1/n) * (dist + penalty) return dist def", "+= [pool.apply_async(spiketrain_list, [ind.gtype])] for i, worker in enumerate(workers): population[indices[i]].ptype = worker.get() pool.close() pool.join()", "steps = 1000 ext_input = [10 for i in xrange(steps)] v = -60", "len(train1) / max(2*m, 1) dist = 1/max(m-1, 1) * (dist + penalty) return", "dist def fitness_test(population, target, dist): '''Compute fitnesses based on distance to the target", "enumerate(developed): if ind.ptype != None: continue indices += [i] workers += [pool.apply_async(spiketrain_list, [ind.gtype])]", "fitnesses based on distance to the target spike train''' pool = mp.Pool(mp.cpu_count()) tested", "!= 0: ind.fitness = 1 / distance else: ind.fitness = float('Inf') return tested", "xrange(steps)] v = -60 u = 0 train = [] for i in", "based on distance to the target spike train''' pool = mp.Pool(mp.cpu_count()) tested =", "to the target spike train''' pool = mp.Pool(mp.cpu_count()) tested = population indices =", "+= [pool.apply_async(dist, [ind.ptype, target])] for i, worker in enumerate(workers): distance = worker.get() population[indices[i]].fitness", "import float_gtype from ea import adult_selection from ea import 
parent_selection from ea import", "= 1000 ext_input = [10 for i in xrange(steps)] v = -60 u", "None: indices += [i] workers += [pool.apply_async(dist, [ind.ptype, target])] for i, worker in", "from ea import parent_selection from ea import reproduction from ea import main from", "def fitness_test(population, target, dist): '''Compute fitnesses based on distance to the target spike", "penalty) return dist def dist_spike_interval(train1, train2): '''Compute distance between two spike trains using", "num in target_file.read().split()] ranges = [(0.001, 0.2), (0.01, 0.3), (-80.0, -30.0), (0.1, 10.0),", "= [] for pop in generation_list: best += [most_fit(pop).fitness] average += [avg_fitness(pop)] stddev", "target_spiketrain = [float(num) for num in target_file.read().split()] ranges = [(0.001, 0.2), (0.01, 0.3),", "a fitness function interactively''' while True: method = raw_input(\"Input distance metric (time/interval/waveform):\\n\") if", "+= [i] workers += [pool.apply_async(dist, [ind.ptype, target])] for i, worker in enumerate(workers): distance", "from ea import main from ea.ea_globals import * import pylab import sys import", "workers = [] for i, ind in enumerate(population): if ind.fitness == None: indices", "= [] for i in xrange(len(spike_train) - k + 1): window = spike_train[i:i+k]", "def fitness_test_mp(population, target, dist): '''Compute fitnesses based on distance to the target spike", "in xrange(1,m)) ** (1/p) penalty = (n - m) * len(train1) / max(2*m,", "population size:\\n\")) fitness_tester = gen_fitness(target_spiketrain) adult_selector, litter_size = adult_selection.gen_adult_selection(popsize) parent_selector = parent_selection.gen_parent_selection(litter_size) mutate", "= (1/n) * (dist + penalty) return dist def dist_spike_interval(train1, train2): '''Compute distance", "dist = 0 for i in xrange(m): dist += abs(spikes1[i] - spikes2[i])**p dist", "for i in xrange(steps): train += [v] if v >= thresh: v =", "k + 1): window = spike_train[i:i+k] if 
window[k//2] == max(window) and window[k//2] >", "+= [average[-1] + stddev[-1]] average_minus_stddev += [average[-1] - stddev[-1]] pylab.figure(1) pylab.fill_between(range(len(generation_list)), average_plus_stddev, average_minus_stddev,", "color='b', label='Average with std.dev.') pylab.title(\"Fitness plot - Izzy\") pylab.xlabel(\"Generation\") pylab.ylabel(\"Fitness\") pylab.legend(loc=\"upper left\") pylab.savefig(\"izzy_fitness.png\")", "2 dist = 1/m * sum(abs(train1[i] - train2[i]) ** p for i in", "best_individual.ptype print best_individual.gtype print best_individual.fitness pylab.figure(2) pylab.plot(range(len(best_spiketrain)), best_spiketrain, color='r', label='Best solution') pylab.plot(range(len(target)), target,", "for i, worker in enumerate(workers): distance = worker.get() population[indices[i]].fitness = 1 / (1", "/ max(2*m, 1) dist = 1/max(m-1, 1) * (dist + penalty) return dist", "workers = [] indices = [] pool = mp.Pool(mp.cpu_count()) for i, ind in", "while True: method = raw_input(\"Input distance metric (time/interval/waveform):\\n\") if method == 'time': return", "from ea import reproduction from ea import main from ea.ea_globals import * import", "dist = 1/m * sum(abs(train1[i] - train2[i]) ** p for i in xrange(m))", "dist_waveform)) else: print \"Unrecognized method: \" + method def develop(population): '''Development function, generates", "c, d and k as a list and compute the corresponding spike train'''", "* (b*v - u) v += dv u += du return train def", "-30.0), (0.1, 10.0), (0.01, 1.0)] popsize = int(raw_input(\"Input population size:\\n\")) fitness_tester = gen_fitness(target_spiketrain)", "= -60 u = 0 train = [] for i in xrange(steps): train", "penalty) return dist def dist_waveform(train1, train2): '''Compute distance between two spike trains using", "stddev = [] average_plus_stddev = [] average_minus_stddev = [] for pop in generation_list:", "= most_fit(generation_list[best_index]) best_spiketrain = best_individual.ptype print 
best_individual.gtype print best_individual.fitness pylab.figure(2) pylab.plot(range(len(best_spiketrain)), best_spiketrain, color='r',", "- m) * len(train1) / max(2*m, 1) dist = 1/max(m-1, 1) * (dist", "continue ind.ptype = spiketrain_list(ind.gtype) return developed def develop_mp(population): '''Development function that makes use", "params[1], params[2], params[3], params[4]) def detect_spikes(spike_train): '''Detect spikes in a spike train using", "parent_selector = parent_selection.gen_parent_selection(litter_size) mutate = float_gtype.gen_mutate(ranges) crossover = float_gtype.gen_crossover() reproducer = reproduction.gen_reproduction(mutate, crossover)", "* len(train1) / max(2*m, 1) dist = 1/max(m-1, 1) * (dist + penalty)", "method == 'waveform': return (lambda population: fitness_test_mp(population, target, dist_waveform)) else: print \"Unrecognized method:", "max(len(spikes1), len(spikes2)) p = 2 dist = 0 for i in xrange(m): dist", "dist def dist_spike_interval(train1, train2): '''Compute distance between two spike trains using the spike", "c, d, k,): '''Compute a spike train according to the Izhikevich model''' tau", "ind.fitness == None: indices += [i] workers += [pool.apply_async(dist, [ind.ptype, target])] for i,", "[] average = [] stddev = [] average_plus_stddev = [] average_minus_stddev = []", "n = max(len(spikes1), len(spikes2)) p = 2 dist = 0 for i in", "that makes use of multiprocessing''' developed = population workers = [] indices =", "division from ea import float_gtype from ea import adult_selection from ea import parent_selection", "train2[i]) ** p for i in xrange(m)) ** (1/p) return dist def fitness_test(population,", "if distance != 0: ind.fitness = 1 / distance else: ind.fitness = float('Inf')", "for each individual''' developed = population for ind in developed: if ind.ptype !=", "parent_selection.gen_parent_selection(litter_size) mutate = float_gtype.gen_mutate(ranges) crossover = float_gtype.gen_crossover() reproducer = 
reproduction.gen_reproduction(mutate, crossover) generations =", "- u) v += dv u += du return train def spiketrain_list(params): '''Take", "1000 ext_input = [10 for i in xrange(steps)] v = -60 u =", "import parent_selection from ea import reproduction from ea import main from ea.ea_globals import", "= dist(ind.ptype, target) if distance != 0: ind.fitness = 1 / distance else:", "ea import main from ea.ea_globals import * import pylab import sys import copy", "[10 for i in xrange(steps)] v = -60 u = 0 train =", "i in xrange(m)) ** (1/p) return dist def fitness_test(population, target, dist): '''Compute fitnesses", "None: continue indices += [i] workers += [pool.apply_async(spiketrain_list, [ind.gtype])] for i, worker in", "'''Compute fitnesses based on distance to the target spike train''' tested = population", "dist): '''Compute fitnesses based on distance to the target spike train''' tested =", "spikes2 = detect_spikes(train2) n = max(len(spikes1), len(spikes2)) m = min(len(spikes1), len(spikes2)) p =", "in enumerate(developed): if ind.ptype != None: continue indices += [i] workers += [pool.apply_async(spiketrain_list,", "alpha=0.2, color='b', label=\"Standard deviation\") pylab.plot(range(len(generation_list)), best, color='r', label='Best') pylab.plot(range(len(generation_list)), average, color='b', label='Average with", "pylab.savefig(\"izzy_fitness.png\") best_index = best.index(max(best)) best_individual = most_fit(generation_list[best_index]) best_spiketrain = best_individual.ptype print best_individual.gtype print", "= spiketrain_list(ind.gtype) return developed def develop_mp(population): '''Development function that makes use of multiprocessing'''", "level - v\") pylab.legend(loc=\"upper right\") pylab.savefig(\"izzy_spiketrains.png\") if __name__ == '__main__': if len(sys.argv) ==", "+= dv u += du return train def spiketrain_list(params): '''Take a, b, c,", "dist += abs(spikes1[i] - spikes2[i])**p dist = dist ** (1/p) penalty = (n-m)*len(train1)", 
"'''Compute a spike train according to the Izhikevich model''' tau = 10 thresh", "abs(spikes1[i] - spikes2[i])**p dist = dist ** (1/p) penalty = (n-m)*len(train1) penalty =", "'''Generate pretty pictures using pylab''' best = [] average = [] stddev =", "number of generations:\\n\")) fitness_goal = float(raw_input(\"Input fitness goal, 0 for none:\\n\")) initial =", "import copy import multiprocessing as mp def spiketrain(a, b, c, d, k,): '''Compute", "label='Target') pylab.title(\"Spiketrain plot\") pylab.xlabel(\"Time - t\") pylab.ylabel(\"Activation level - v\") pylab.legend(loc=\"upper right\") pylab.savefig(\"izzy_spiketrains.png\")", "(dist + penalty) return dist def dist_waveform(train1, train2): '''Compute distance between two spike", "(lambda population: fitness_test_mp(population, target, dist_spike_interval)) elif method == 'waveform': return (lambda population: fitness_test_mp(population,", "+ 1): window = spike_train[i:i+k] if window[k//2] == max(window) and window[k//2] > thresh:", "10 thresh = 35 steps = 1000 ext_input = [10 for i in", "spike train''' return spiketrain(params[0], params[1], params[2], params[3], params[4]) def detect_spikes(spike_train): '''Detect spikes in", "penalty / max(2*m, 1) dist = (1/n) * (dist + penalty) return dist", "u + ext_input[i]) du = a/tau * (b*v - u) v += dv", "ind in tested: if ind.fitness != None: continue distance = dist(ind.ptype, target) if", "dv = 1/tau * (k * v**2 + 5*v + 140 - u", "else: ind.fitness = float('Inf') return tested def fitness_test_mp(population, target, dist): '''Compute fitnesses based", "i in xrange(len(spike_train) - k + 1): window = spike_train[i:i+k] if window[k//2] ==", "tested def gen_fitness(target): '''Generate a fitness function interactively''' while True: method = raw_input(\"Input", "average_minus_stddev, alpha=0.2, color='b', label=\"Standard deviation\") pylab.plot(range(len(generation_list)), best, color='r', label='Best') pylab.plot(range(len(generation_list)), average, 
color='b', label='Average", "pylab.title(\"Spiketrain plot\") pylab.xlabel(\"Time - t\") pylab.ylabel(\"Activation level - v\") pylab.legend(loc=\"upper right\") pylab.savefig(\"izzy_spiketrains.png\") if", "distance between two spike trains using the spike interval distance metric''' spikes1 =", "for i in xrange(m): dist += abs(spikes1[i] - spikes2[i])**p dist = dist **", "[average[-1] + stddev[-1]] average_minus_stddev += [average[-1] - stddev[-1]] pylab.figure(1) pylab.fill_between(range(len(generation_list)), average_plus_stddev, average_minus_stddev, alpha=0.2,", "'''Compute fitnesses based on distance to the target spike train''' pool = mp.Pool(mp.cpu_count())", "= dist ** (1/p) penalty = (n-m)*len(train1) penalty = penalty / max(2*m, 1)", "pylab.title(\"Fitness plot - Izzy\") pylab.xlabel(\"Generation\") pylab.ylabel(\"Fitness\") pylab.legend(loc=\"upper left\") pylab.savefig(\"izzy_fitness.png\") best_index = best.index(max(best)) best_individual", "train''' pool = mp.Pool(mp.cpu_count()) tested = population indices = [] workers = []", "+= du return train def spiketrain_list(params): '''Take a, b, c, d and k", "[] for i in xrange(len(spike_train) - k + 1): window = spike_train[i:i+k] if", "spikes1 = detect_spikes(train1) spikes2 = detect_spikes(train2) n = max(len(spikes1), len(spikes2)) m = min(len(spikes1),", "in a spike train using a sliding window of size k''' thresh =", "developed def develop_mp(population): '''Development function that makes use of multiprocessing''' developed = population", "float_gtype.gen_mutate(ranges) crossover = float_gtype.gen_crossover() reproducer = reproduction.gen_reproduction(mutate, crossover) generations = int(raw_input(\"Input max number", "= u + d dv = 1/tau * (k * v**2 + 5*v", "in xrange(m)) ** (1/p) return dist def fitness_test(population, target, dist): '''Compute fitnesses based", "indices += [i] workers += [pool.apply_async(spiketrain_list, [ind.gtype])] for i, worker in enumerate(workers): 
population[indices[i]].ptype", "v\") pylab.legend(loc=\"upper right\") pylab.savefig(\"izzy_spiketrains.png\") if __name__ == '__main__': if len(sys.argv) == 1: print", "k as a list and compute the corresponding spike train''' return spiketrain(params[0], params[1],", "pylab.plot(range(len(best_spiketrain)), best_spiketrain, color='r', label='Best solution') pylab.plot(range(len(target)), target, color='blue', label='Target') pylab.title(\"Spiketrain plot\") pylab.xlabel(\"Time -", "[i] workers += [pool.apply_async(spiketrain_list, [ind.gtype])] for i, worker in enumerate(workers): population[indices[i]].ptype = worker.get()", "pop in generation_list: best += [most_fit(pop).fitness] average += [avg_fitness(pop)] stddev += [fitness_stddev(pop)] average_plus_stddev" ]
[ "commit(self): self.connection.commit() def rollback(self): self.connection.rollback() def close(self): self.connection.close() def escape_string(self, buf): return self.connection.escape_string(buf)", "'TRUE' else: switch = 'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self): if self.connection.autocommit == 'TRUE': return", "set_autocommit, doc = \"autocommit value for current Cubrid session\") def commit(self): self.connection.commit() def", "= kwargs2.pop('charset', 'utf8') self.connection = _cubrid.connect(*args, **kwargs2) def __del__(self): pass def cursor(self, dictCursor", "if dictCursor: cursorClass = DictCursor else: cursorClass = Cursor return cursorClass(self) def set_autocommit(self,", "def commit(self): self.connection.commit() def rollback(self): self.connection.rollback() def close(self): self.connection.close() def escape_string(self, buf): return", "self.connection = _cubrid.connect(*args, **kwargs2) def __del__(self): pass def cursor(self, dictCursor = None): if", "Connection Object\"\"\" def __init__(self, *args, **kwargs): 'Create a connecton to the database.' self.charset", "switch = 'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self): if self.connection.autocommit == 'TRUE': return True else:", "= '' kwargs2 = kwargs.copy() self.charset = kwargs2.pop('charset', 'utf8') self.connection = _cubrid.connect(*args, **kwargs2)", "current Cubrid session\") def commit(self): self.connection.commit() def rollback(self): self.connection.rollback() def close(self): self.connection.close() def", "self.connection.set_autocommit(switch) def get_autocommit(self): if self.connection.autocommit == 'TRUE': return True else: return False autocommit", "import types, _cubrid class Connection(object): \"\"\"CUBRID Database Connection Object\"\"\" def __init__(self, *args, **kwargs):", "*args, **kwargs): 'Create a connecton to the database.' 
self.charset = '' kwargs2 =", "isinstance(value, bool): raise ValueError(\"Parameter should be a boolean value\") if value: switch =", "'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self): if self.connection.autocommit == 'TRUE': return True else: return False", "\"autocommit value for current Cubrid session\") def commit(self): self.connection.commit() def rollback(self): self.connection.rollback() def", "__init__(self, *args, **kwargs): 'Create a connecton to the database.' self.charset = '' kwargs2", "from CUBRIDdb.cursors import * import types, _cubrid class Connection(object): \"\"\"CUBRID Database Connection Object\"\"\"", "def set_autocommit(self, value): if not isinstance(value, bool): raise ValueError(\"Parameter should be a boolean", "be a boolean value\") if value: switch = 'TRUE' else: switch = 'FALSE'", "Connection(object): \"\"\"CUBRID Database Connection Object\"\"\" def __init__(self, *args, **kwargs): 'Create a connecton to", "= DictCursor else: cursorClass = Cursor return cursorClass(self) def set_autocommit(self, value): if not", "\"\"\" This module implements connections for CUBRIDdb. Presently there is only one class:", "to make your own subclasses. In most cases, you will probably override Connection.default_cursor", "set_autocommit(self, value): if not isinstance(value, bool): raise ValueError(\"Parameter should be a boolean value\")", "for current Cubrid session\") def commit(self): self.connection.commit() def rollback(self): self.connection.rollback() def close(self): self.connection.close()", "Presently there is only one class: Connection. Others are unlikely. However, you might", "Cubrid session\") def commit(self): self.connection.commit() def rollback(self): self.connection.rollback() def close(self): self.connection.close() def escape_string(self,", "cursor(self, dictCursor = None): if dictCursor: cursorClass = DictCursor else: cursorClass = Cursor", "'Create a connecton to the database.' 
self.charset = '' kwargs2 = kwargs.copy() self.charset", "only one class: Connection. Others are unlikely. However, you might want to make", "a boolean value\") if value: switch = 'TRUE' else: switch = 'FALSE' self.connection.set_autocommit(switch)", "cursorClass(self) def set_autocommit(self, value): if not isinstance(value, bool): raise ValueError(\"Parameter should be a", "you might want to make your own subclasses. In most cases, you will", "return True else: return False autocommit = property(get_autocommit, set_autocommit, doc = \"autocommit value", "= 'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self): if self.connection.autocommit == 'TRUE': return True else: return", "pass def cursor(self, dictCursor = None): if dictCursor: cursorClass = DictCursor else: cursorClass", "Cursor class. \"\"\" from CUBRIDdb.cursors import * import types, _cubrid class Connection(object): \"\"\"CUBRID", "CUBRIDdb.cursors import * import types, _cubrid class Connection(object): \"\"\"CUBRID Database Connection Object\"\"\" def", "implements connections for CUBRIDdb. Presently there is only one class: Connection. Others are", "== 'TRUE': return True else: return False autocommit = property(get_autocommit, set_autocommit, doc =", "session\") def commit(self): self.connection.commit() def rollback(self): self.connection.rollback() def close(self): self.connection.close() def escape_string(self, buf):", "database.' self.charset = '' kwargs2 = kwargs.copy() self.charset = kwargs2.pop('charset', 'utf8') self.connection =", "is only one class: Connection. Others are unlikely. However, you might want to", "CUBRIDdb. Presently there is only one class: Connection. Others are unlikely. However, you", "**kwargs): 'Create a connecton to the database.' self.charset = '' kwargs2 = kwargs.copy()", "a connecton to the database.' 
self.charset = '' kwargs2 = kwargs.copy() self.charset =", "ValueError(\"Parameter should be a boolean value\") if value: switch = 'TRUE' else: switch", "_cubrid class Connection(object): \"\"\"CUBRID Database Connection Object\"\"\" def __init__(self, *args, **kwargs): 'Create a", "dictCursor: cursorClass = DictCursor else: cursorClass = Cursor return cursorClass(self) def set_autocommit(self, value):", "types, _cubrid class Connection(object): \"\"\"CUBRID Database Connection Object\"\"\" def __init__(self, *args, **kwargs): 'Create", "subclasses. In most cases, you will probably override Connection.default_cursor with a non-standard Cursor", "= Cursor return cursorClass(self) def set_autocommit(self, value): if not isinstance(value, bool): raise ValueError(\"Parameter", "self.charset = '' kwargs2 = kwargs.copy() self.charset = kwargs2.pop('charset', 'utf8') self.connection = _cubrid.connect(*args,", "class Connection(object): \"\"\"CUBRID Database Connection Object\"\"\" def __init__(self, *args, **kwargs): 'Create a connecton", "dictCursor = None): if dictCursor: cursorClass = DictCursor else: cursorClass = Cursor return", "are unlikely. However, you might want to make your own subclasses. In most", "bool): raise ValueError(\"Parameter should be a boolean value\") if value: switch = 'TRUE'", "should be a boolean value\") if value: switch = 'TRUE' else: switch =", "def cursor(self, dictCursor = None): if dictCursor: cursorClass = DictCursor else: cursorClass =", "with a non-standard Cursor class. \"\"\" from CUBRIDdb.cursors import * import types, _cubrid", "'TRUE': return True else: return False autocommit = property(get_autocommit, set_autocommit, doc = \"autocommit", "probably override Connection.default_cursor with a non-standard Cursor class. \"\"\" from CUBRIDdb.cursors import *", "you will probably override Connection.default_cursor with a non-standard Cursor class. 
\"\"\" from CUBRIDdb.cursors", "if not isinstance(value, bool): raise ValueError(\"Parameter should be a boolean value\") if value:", "class. \"\"\" from CUBRIDdb.cursors import * import types, _cubrid class Connection(object): \"\"\"CUBRID Database", "autocommit = property(get_autocommit, set_autocommit, doc = \"autocommit value for current Cubrid session\") def", "self.connection.autocommit == 'TRUE': return True else: return False autocommit = property(get_autocommit, set_autocommit, doc", "\"\"\"CUBRID Database Connection Object\"\"\" def __init__(self, *args, **kwargs): 'Create a connecton to the", "* import types, _cubrid class Connection(object): \"\"\"CUBRID Database Connection Object\"\"\" def __init__(self, *args,", "doc = \"autocommit value for current Cubrid session\") def commit(self): self.connection.commit() def rollback(self):", "value): if not isinstance(value, bool): raise ValueError(\"Parameter should be a boolean value\") if", "value\") if value: switch = 'TRUE' else: switch = 'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self):", "cases, you will probably override Connection.default_cursor with a non-standard Cursor class. \"\"\" from", "value for current Cubrid session\") def commit(self): self.connection.commit() def rollback(self): self.connection.rollback() def close(self):", "Database Connection Object\"\"\" def __init__(self, *args, **kwargs): 'Create a connecton to the database.'", "make your own subclasses. In most cases, you will probably override Connection.default_cursor with", "'' kwargs2 = kwargs.copy() self.charset = kwargs2.pop('charset', 'utf8') self.connection = _cubrid.connect(*args, **kwargs2) def", "cursorClass = Cursor return cursorClass(self) def set_autocommit(self, value): if not isinstance(value, bool): raise", "Connection.default_cursor with a non-standard Cursor class. 
\"\"\" from CUBRIDdb.cursors import * import types,", "False autocommit = property(get_autocommit, set_autocommit, doc = \"autocommit value for current Cubrid session\")", "DictCursor else: cursorClass = Cursor return cursorClass(self) def set_autocommit(self, value): if not isinstance(value,", "unlikely. However, you might want to make your own subclasses. In most cases,", "True else: return False autocommit = property(get_autocommit, set_autocommit, doc = \"autocommit value for", "\"\"\" from CUBRIDdb.cursors import * import types, _cubrid class Connection(object): \"\"\"CUBRID Database Connection", "This module implements connections for CUBRIDdb. Presently there is only one class: Connection.", "= \"autocommit value for current Cubrid session\") def commit(self): self.connection.commit() def rollback(self): self.connection.rollback()", "for CUBRIDdb. Presently there is only one class: Connection. Others are unlikely. However,", "else: cursorClass = Cursor return cursorClass(self) def set_autocommit(self, value): if not isinstance(value, bool):", "if self.connection.autocommit == 'TRUE': return True else: return False autocommit = property(get_autocommit, set_autocommit,", "not isinstance(value, bool): raise ValueError(\"Parameter should be a boolean value\") if value: switch", "_cubrid.connect(*args, **kwargs2) def __del__(self): pass def cursor(self, dictCursor = None): if dictCursor: cursorClass", "= None): if dictCursor: cursorClass = DictCursor else: cursorClass = Cursor return cursorClass(self)", "value: switch = 'TRUE' else: switch = 'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self): if self.connection.autocommit", "= property(get_autocommit, set_autocommit, doc = \"autocommit value for current Cubrid session\") def commit(self):", "In most cases, you will probably override Connection.default_cursor with a non-standard Cursor class.", "__del__(self): pass def cursor(self, dictCursor = None): if dictCursor: cursorClass = DictCursor 
else:", "cursorClass = DictCursor else: cursorClass = Cursor return cursorClass(self) def set_autocommit(self, value): if", "return False autocommit = property(get_autocommit, set_autocommit, doc = \"autocommit value for current Cubrid", "else: switch = 'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self): if self.connection.autocommit == 'TRUE': return True", "= _cubrid.connect(*args, **kwargs2) def __del__(self): pass def cursor(self, dictCursor = None): if dictCursor:", "might want to make your own subclasses. In most cases, you will probably", "Others are unlikely. However, you might want to make your own subclasses. In", "kwargs.copy() self.charset = kwargs2.pop('charset', 'utf8') self.connection = _cubrid.connect(*args, **kwargs2) def __del__(self): pass def", "return cursorClass(self) def set_autocommit(self, value): if not isinstance(value, bool): raise ValueError(\"Parameter should be", "the database.' self.charset = '' kwargs2 = kwargs.copy() self.charset = kwargs2.pop('charset', 'utf8') self.connection", "non-standard Cursor class. \"\"\" from CUBRIDdb.cursors import * import types, _cubrid class Connection(object):", "there is only one class: Connection. Others are unlikely. However, you might want", "However, you might want to make your own subclasses. In most cases, you", "**kwargs2) def __del__(self): pass def cursor(self, dictCursor = None): if dictCursor: cursorClass =", "= kwargs.copy() self.charset = kwargs2.pop('charset', 'utf8') self.connection = _cubrid.connect(*args, **kwargs2) def __del__(self): pass", "def get_autocommit(self): if self.connection.autocommit == 'TRUE': return True else: return False autocommit =", "kwargs2 = kwargs.copy() self.charset = kwargs2.pop('charset', 'utf8') self.connection = _cubrid.connect(*args, **kwargs2) def __del__(self):", "def __init__(self, *args, **kwargs): 'Create a connecton to the database.' 
self.charset = ''", "will probably override Connection.default_cursor with a non-standard Cursor class. \"\"\" from CUBRIDdb.cursors import", "want to make your own subclasses. In most cases, you will probably override", "def __del__(self): pass def cursor(self, dictCursor = None): if dictCursor: cursorClass = DictCursor", "own subclasses. In most cases, you will probably override Connection.default_cursor with a non-standard", "'utf8') self.connection = _cubrid.connect(*args, **kwargs2) def __del__(self): pass def cursor(self, dictCursor = None):", "Object\"\"\" def __init__(self, *args, **kwargs): 'Create a connecton to the database.' self.charset =", "Connection. Others are unlikely. However, you might want to make your own subclasses.", "= 'TRUE' else: switch = 'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self): if self.connection.autocommit == 'TRUE':", "else: return False autocommit = property(get_autocommit, set_autocommit, doc = \"autocommit value for current", "switch = 'TRUE' else: switch = 'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self): if self.connection.autocommit ==", "property(get_autocommit, set_autocommit, doc = \"autocommit value for current Cubrid session\") def commit(self): self.connection.commit()", "raise ValueError(\"Parameter should be a boolean value\") if value: switch = 'TRUE' else:", "Cursor return cursorClass(self) def set_autocommit(self, value): if not isinstance(value, bool): raise ValueError(\"Parameter should", "get_autocommit(self): if self.connection.autocommit == 'TRUE': return True else: return False autocommit = property(get_autocommit,", "import * import types, _cubrid class Connection(object): \"\"\"CUBRID Database Connection Object\"\"\" def __init__(self,", "self.charset = kwargs2.pop('charset', 'utf8') self.connection = _cubrid.connect(*args, **kwargs2) def __del__(self): pass def cursor(self,", "class: Connection. Others are unlikely. 
However, you might want to make your own", "your own subclasses. In most cases, you will probably override Connection.default_cursor with a", "module implements connections for CUBRIDdb. Presently there is only one class: Connection. Others", "connecton to the database.' self.charset = '' kwargs2 = kwargs.copy() self.charset = kwargs2.pop('charset',", "one class: Connection. Others are unlikely. However, you might want to make your", "boolean value\") if value: switch = 'TRUE' else: switch = 'FALSE' self.connection.set_autocommit(switch) def", "to the database.' self.charset = '' kwargs2 = kwargs.copy() self.charset = kwargs2.pop('charset', 'utf8')", "if value: switch = 'TRUE' else: switch = 'FALSE' self.connection.set_autocommit(switch) def get_autocommit(self): if", "None): if dictCursor: cursorClass = DictCursor else: cursorClass = Cursor return cursorClass(self) def", "connections for CUBRIDdb. Presently there is only one class: Connection. Others are unlikely.", "most cases, you will probably override Connection.default_cursor with a non-standard Cursor class. \"\"\"", "kwargs2.pop('charset', 'utf8') self.connection = _cubrid.connect(*args, **kwargs2) def __del__(self): pass def cursor(self, dictCursor =", "override Connection.default_cursor with a non-standard Cursor class. \"\"\" from CUBRIDdb.cursors import * import", "a non-standard Cursor class. \"\"\" from CUBRIDdb.cursors import * import types, _cubrid class" ]
[ "print(\"connection failed\") else: s.send(str.encode(message)) gett = s.recv(len(message)) result = gett.decode() if(result == message):", "host = socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\") host = res[0] try: s.connect((host, 12345)) except:", "host = res[0] try: s.connect((host, 12345)) except: print(\"connection failed\") else: s.send(str.encode(message)) gett =", "message): print('go ahead', message) s.close() if __name__ == \"__main__\": if len(sys.argv) <= 1:", "socket def client(message): s = socket.socket() host = socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\") host", "message) s.close() if __name__ == \"__main__\": if len(sys.argv) <= 1: print('not good') else:", "<reponame>pereztjacob/http-server import sys import socket def client(message): s = socket.socket() host = socket.gethostname()", "= socket.socket() host = socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\") host = res[0] try: s.connect((host,", "= socket.gethostbyaddr(\"127.0.0.1\") host = res[0] try: s.connect((host, 12345)) except: print(\"connection failed\") else: s.send(str.encode(message))", "s.send(str.encode(message)) gett = s.recv(len(message)) result = gett.decode() if(result == message): print('go ahead', message)", "gett = s.recv(len(message)) result = gett.decode() if(result == message): print('go ahead', message) s.close()", "except: print(\"connection failed\") else: s.send(str.encode(message)) gett = s.recv(len(message)) result = gett.decode() if(result ==", "s.connect((host, 12345)) except: print(\"connection failed\") else: s.send(str.encode(message)) gett = s.recv(len(message)) result = gett.decode()", "socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\") host = res[0] try: s.connect((host, 12345)) except: print(\"connection failed\")", "s.recv(len(message)) result = gett.decode() if(result == message): print('go ahead', message) s.close() if __name__", "import socket def client(message): s = socket.socket() host = 
socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\")", "res[0] try: s.connect((host, 12345)) except: print(\"connection failed\") else: s.send(str.encode(message)) gett = s.recv(len(message)) result", "== message): print('go ahead', message) s.close() if __name__ == \"__main__\": if len(sys.argv) <=", "12345)) except: print(\"connection failed\") else: s.send(str.encode(message)) gett = s.recv(len(message)) result = gett.decode() if(result", "try: s.connect((host, 12345)) except: print(\"connection failed\") else: s.send(str.encode(message)) gett = s.recv(len(message)) result =", "sys import socket def client(message): s = socket.socket() host = socket.gethostname() res =", "s.close() if __name__ == \"__main__\": if len(sys.argv) <= 1: print('not good') else: client(sys.argv[1])", "s = socket.socket() host = socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\") host = res[0] try:", "failed\") else: s.send(str.encode(message)) gett = s.recv(len(message)) result = gett.decode() if(result == message): print('go", "import sys import socket def client(message): s = socket.socket() host = socket.gethostname() res", "ahead', message) s.close() if __name__ == \"__main__\": if len(sys.argv) <= 1: print('not good')", "socket.gethostbyaddr(\"127.0.0.1\") host = res[0] try: s.connect((host, 12345)) except: print(\"connection failed\") else: s.send(str.encode(message)) gett", "print('go ahead', message) s.close() if __name__ == \"__main__\": if len(sys.argv) <= 1: print('not", "def client(message): s = socket.socket() host = socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\") host =", "= res[0] try: s.connect((host, 12345)) except: print(\"connection failed\") else: s.send(str.encode(message)) gett = s.recv(len(message))", "result = gett.decode() if(result == message): print('go ahead', message) s.close() if __name__ ==", "= gett.decode() if(result == message): print('go ahead', message) s.close() if __name__ == \"__main__\":", "= 
s.recv(len(message)) result = gett.decode() if(result == message): print('go ahead', message) s.close() if", "socket.socket() host = socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\") host = res[0] try: s.connect((host, 12345))", "res = socket.gethostbyaddr(\"127.0.0.1\") host = res[0] try: s.connect((host, 12345)) except: print(\"connection failed\") else:", "if(result == message): print('go ahead', message) s.close() if __name__ == \"__main__\": if len(sys.argv)", "gett.decode() if(result == message): print('go ahead', message) s.close() if __name__ == \"__main__\": if", "= socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\") host = res[0] try: s.connect((host, 12345)) except: print(\"connection", "client(message): s = socket.socket() host = socket.gethostname() res = socket.gethostbyaddr(\"127.0.0.1\") host = res[0]", "else: s.send(str.encode(message)) gett = s.recv(len(message)) result = gett.decode() if(result == message): print('go ahead'," ]
[ "Find the Sum of Digits of a Number def sum_of_digits(num): # Extracting Each", "the number: \", end=\"\") n = int(input()) S = sum_of_digits(abs(n)) print(\"The sum of", "Space Complexity: O(1) SAMPLE INPUT AND OUTPUT SAMPLE 1 Enter the number: -12", "sum_of_digits(abs(n)) print(\"The sum of digits of the given number is {}.\".format(S)) ''' Time", "the given number Space Complexity: O(1) SAMPLE INPUT AND OUTPUT SAMPLE 1 Enter", "given number Space Complexity: O(1) SAMPLE INPUT AND OUTPUT SAMPLE 1 Enter the", "digits # and compute thier sum in 's' s = 0 while num", "return s if __name__ == '__main__': # Input the number And # Call", "digits of the given number is 3. SAMPLE 2 Enter the number: 43258", "3. SAMPLE 2 Enter the number: 43258 The sum of digits of the", "SAMPLE INPUT AND OUTPUT SAMPLE 1 Enter the number: -12 The sum of", "# Python program to Find the Sum of Digits of a Number def", "AND OUTPUT SAMPLE 1 Enter the number: -12 The sum of digits of", "(num % 10) num = num // 10 return s if __name__ ==", "# and compute thier sum in 's' s = 0 while num !=", "0 while num != 0: s = s + (num % 10) num", "\"num\" is the length of the given number Space Complexity: O(1) SAMPLE INPUT", "sum in 's' s = 0 while num != 0: s = s", "num // 10 return s if __name__ == '__main__': # Input the number", "the Sum of Digits of a Number def sum_of_digits(num): # Extracting Each digits", "given number is {}.\".format(S)) ''' Time Complexity: O(log(num)), where \"num\" is the length", "= s + (num % 10) num = num // 10 return s", "S = sum_of_digits(abs(n)) print(\"The sum of digits of the given number is {}.\".format(S))", "# Input the number And # Call the function print(\"Enter the number: \",", "= 0 while num != 0: s = s + (num % 10)", "the given number is {}.\".format(S)) ''' Time Complexity: O(log(num)), where \"num\" is the", "Time Complexity: O(log(num)), where \"num\" is the length of the given number Space", "while num != 0: s = s + (num % 10) num =", "end=\"\") n = 
int(input()) S = sum_of_digits(abs(n)) print(\"The sum of digits of the", "// 10 return s if __name__ == '__main__': # Input the number And", "10 return s if __name__ == '__main__': # Input the number And #", "of Digits of a Number def sum_of_digits(num): # Extracting Each digits # and", "= int(input()) S = sum_of_digits(abs(n)) print(\"The sum of digits of the given number", "function print(\"Enter the number: \", end=\"\") n = int(input()) S = sum_of_digits(abs(n)) print(\"The", "= num // 10 return s if __name__ == '__main__': # Input the", "num = num // 10 return s if __name__ == '__main__': # Input", "SAMPLE 2 Enter the number: 43258 The sum of digits of the given", "+ (num % 10) num = num // 10 return s if __name__", "number: -12 The sum of digits of the given number is 3. SAMPLE", "and compute thier sum in 's' s = 0 while num != 0:", "Each digits # and compute thier sum in 's' s = 0 while", "2 Enter the number: 43258 The sum of digits of the given number", "program to Find the Sum of Digits of a Number def sum_of_digits(num): #", "print(\"The sum of digits of the given number is {}.\".format(S)) ''' Time Complexity:", "Enter the number: 43258 The sum of digits of the given number is", "O(1) SAMPLE INPUT AND OUTPUT SAMPLE 1 Enter the number: -12 The sum", "if __name__ == '__main__': # Input the number And # Call the function", "# Extracting Each digits # and compute thier sum in 's' s =", "0: s = s + (num % 10) num = num // 10", "the number: -12 The sum of digits of the given number is 3.", "is 3. 
SAMPLE 2 Enter the number: 43258 The sum of digits of", "s = s + (num % 10) num = num // 10 return", "to Find the Sum of Digits of a Number def sum_of_digits(num): # Extracting", "number is {}.\".format(S)) ''' Time Complexity: O(log(num)), where \"num\" is the length of", "digits of the given number is {}.\".format(S)) ''' Time Complexity: O(log(num)), where \"num\"", "Enter the number: -12 The sum of digits of the given number is", "sum of digits of the given number is {}.\".format(S)) ''' Time Complexity: O(log(num)),", "sum_of_digits(num): # Extracting Each digits # and compute thier sum in 's' s", "n = int(input()) S = sum_of_digits(abs(n)) print(\"The sum of digits of the given", "And # Call the function print(\"Enter the number: \", end=\"\") n = int(input())", "OUTPUT SAMPLE 1 Enter the number: -12 The sum of digits of the", "given number is 3. SAMPLE 2 Enter the number: 43258 The sum of", "is {}.\".format(S)) ''' Time Complexity: O(log(num)), where \"num\" is the length of the", "def sum_of_digits(num): # Extracting Each digits # and compute thier sum in 's'", "is the length of the given number Space Complexity: O(1) SAMPLE INPUT AND", "a Number def sum_of_digits(num): # Extracting Each digits # and compute thier sum", "1 Enter the number: -12 The sum of digits of the given number", "10) num = num // 10 return s if __name__ == '__main__': #", "of the given number is 3. SAMPLE 2 Enter the number: 43258 The", "Number def sum_of_digits(num): # Extracting Each digits # and compute thier sum in", "''' Time Complexity: O(log(num)), where \"num\" is the length of the given number", "num != 0: s = s + (num % 10) num = num", "s = 0 while num != 0: s = s + (num %", "the number: 43258 The sum of digits of the given number is 22.", "compute thier sum in 's' s = 0 while num != 0: s", "'s' s = 0 while num != 0: s = s + (num", "the number And # Call the function print(\"Enter the number: \", end=\"\") n", "sum of digits of the given number is 3. 
SAMPLE 2 Enter the", "Extracting Each digits # and compute thier sum in 's' s = 0", "where \"num\" is the length of the given number Space Complexity: O(1) SAMPLE", "the length of the given number Space Complexity: O(1) SAMPLE INPUT AND OUTPUT", "the given number is 3. SAMPLE 2 Enter the number: 43258 The sum", "of the given number is {}.\".format(S)) ''' Time Complexity: O(log(num)), where \"num\" is", "number: 43258 The sum of digits of the given number is 22. '''", "'__main__': # Input the number And # Call the function print(\"Enter the number:", "!= 0: s = s + (num % 10) num = num //", "== '__main__': # Input the number And # Call the function print(\"Enter the", "O(log(num)), where \"num\" is the length of the given number Space Complexity: O(1)", "Digits of a Number def sum_of_digits(num): # Extracting Each digits # and compute", "__name__ == '__main__': # Input the number And # Call the function print(\"Enter", "int(input()) S = sum_of_digits(abs(n)) print(\"The sum of digits of the given number is", "length of the given number Space Complexity: O(1) SAMPLE INPUT AND OUTPUT SAMPLE", "% 10) num = num // 10 return s if __name__ == '__main__':", "of digits of the given number is {}.\".format(S)) ''' Time Complexity: O(log(num)), where", "s if __name__ == '__main__': # Input the number And # Call the", "The sum of digits of the given number is 3. SAMPLE 2 Enter", "s + (num % 10) num = num // 10 return s if", "number And # Call the function print(\"Enter the number: \", end=\"\") n =", "number: \", end=\"\") n = int(input()) S = sum_of_digits(abs(n)) print(\"The sum of digits", "Complexity: O(1) SAMPLE INPUT AND OUTPUT SAMPLE 1 Enter the number: -12 The", "number is 3. SAMPLE 2 Enter the number: 43258 The sum of digits", "print(\"Enter the number: \", end=\"\") n = int(input()) S = sum_of_digits(abs(n)) print(\"The sum", "\", end=\"\") n = int(input()) S = sum_of_digits(abs(n)) print(\"The sum of digits of", "-12 The sum of digits of the given number is 3. 
SAMPLE 2", "# Call the function print(\"Enter the number: \", end=\"\") n = int(input()) S", "in 's' s = 0 while num != 0: s = s +", "thier sum in 's' s = 0 while num != 0: s =", "Python program to Find the Sum of Digits of a Number def sum_of_digits(num):", "the function print(\"Enter the number: \", end=\"\") n = int(input()) S = sum_of_digits(abs(n))", "= sum_of_digits(abs(n)) print(\"The sum of digits of the given number is {}.\".format(S)) '''", "{}.\".format(S)) ''' Time Complexity: O(log(num)), where \"num\" is the length of the given", "INPUT AND OUTPUT SAMPLE 1 Enter the number: -12 The sum of digits", "of a Number def sum_of_digits(num): # Extracting Each digits # and compute thier", "Call the function print(\"Enter the number: \", end=\"\") n = int(input()) S =", "Input the number And # Call the function print(\"Enter the number: \", end=\"\")", "number Space Complexity: O(1) SAMPLE INPUT AND OUTPUT SAMPLE 1 Enter the number:", "Complexity: O(log(num)), where \"num\" is the length of the given number Space Complexity:", "of the given number Space Complexity: O(1) SAMPLE INPUT AND OUTPUT SAMPLE 1", "of digits of the given number is 3. SAMPLE 2 Enter the number:", "Sum of Digits of a Number def sum_of_digits(num): # Extracting Each digits #", "SAMPLE 1 Enter the number: -12 The sum of digits of the given" ]
[ "yield FileRecord(path + (key,), offset, raw_size, raw_executable) def transform(f, out): # header_size header", "header header 2: return of the length prefixes header_unpadded_size = _read_uint4_le(f) padding_size =", "< 4) if size != 0: _expect(f.read(size) == b'\\0' * size) def _flatten(path,", "else 0o644 out.addfile(info, f) if __name__ == '__main__': import sys with tarfile.open(fileobj=sys.stdout.buffer, mode='w|')", "if not expectation: raise ValueError('Unexpected data in Asar file') def _read_uint4_le(f): return int.from_bytes(_read_exact(f,", "_UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path: str offset: int size: int executable: bool", "size < 4) if size != 0: _expect(f.read(size) == b'\\0' * size) def", "header_size header _expect(f.read(4) == _UINT32_SIZE) # header_size header_pickled_size = _read_uint4_le(f) - 4 #", "import json import tarfile from typing import NamedTuple _UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple):", "size): _expect(0 <= size < 4) if size != 0: _expect(f.read(size) == b'\\0'", "raise ValueError('Unexpected end of Asar file') return result def _expect(expectation): if not expectation:", "_expect(0 <= size < 4) if size != 0: _expect(f.read(size) == b'\\0' *", "from _flatten(path + (key,), value['files']) else: raw_offset = value['offset'] raw_size = value['size'] raw_executable", "header_pickled_size) # header header 2: return of the length prefixes header_unpadded_size = _read_uint4_le(f)", "size != 0: _expect(f.read(size) == b'\\0' * size) def _flatten(path, index): for key,", "ValueError('Unexpected data in Asar file') def _read_uint4_le(f): return int.from_bytes(_read_exact(f, 4), 'little') def _read_padding(f,", "header _expect(f.read(4) == _UINT32_SIZE) # header_size header_pickled_size = _read_uint4_le(f) - 4 # header", "!= 0: _expect(f.read(size) == b'\\0' * size) def _flatten(path, index): for key, value", "b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path: str 
offset: int size: int executable: bool def _read_exact(f,", "key, value in index.items(): _expect(key and '/' not in key and key not", "def _flatten(path, index): for key, value in index.items(): _expect(key and '/' not in", "+ r.size info = tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size info.mode = 0o755 if r.executable", "_UINT32_SIZE) # header_size header_pickled_size = _read_uint4_le(f) - 4 # header header _expect(_read_uint4_le(f) ==", "the length prefixes header_unpadded_size = _read_uint4_le(f) padding_size = header_pickled_size - 4 - header_unpadded_size", "else: raw_offset = value['offset'] raw_size = value['size'] raw_executable = value.get('executable', False) _expect(isinstance(raw_offset, str))", "= header_pickled_size - 4 - header_unpadded_size header_bytes = _read_exact(f, header_unpadded_size) header = json.loads(header_bytes)", "import NamedTuple _UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path: str offset: int size: int", "_expect(isinstance(raw_executable, bool)) offset = int(raw_offset) _expect(offset >= 0) _expect(raw_size >= 0) yield FileRecord(path", "int)) _expect(isinstance(raw_executable, bool)) offset = int(raw_offset) _expect(offset >= 0) _expect(raw_size >= 0) yield", "_expect(expectation): if not expectation: raise ValueError('Unexpected data in Asar file') def _read_uint4_le(f): return", "= b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path: str offset: int size: int executable: bool def", "raw_executable) def transform(f, out): # header_size header _expect(f.read(4) == _UINT32_SIZE) # header_size header_pickled_size", "0: _expect(f.read(size) == b'\\0' * size) def _flatten(path, index): for key, value in", "str offset: int size: int executable: bool def _read_exact(f, count): result = f.read(count)", "key and key not in {'.', '..'}) if 'files' in value: yield from", "index.items(): _expect(key and '/' not in key and key not in {'.', '..'})", "not in key and key not in {'.', '..'}) if 
'files' in value:", "= _read_exact(f, header_unpadded_size) header = json.loads(header_bytes) _read_padding(f, padding_size) offset = 0 for r", "value['size'] raw_executable = value.get('executable', False) _expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool)) offset =", "bool def _read_exact(f, count): result = f.read(count) if len(result) != count: raise ValueError('Unexpected", "header_size header_pickled_size = _read_uint4_le(f) - 4 # header header _expect(_read_uint4_le(f) == header_pickled_size) #", "offset, raw_size, raw_executable) def transform(f, out): # header_size header _expect(f.read(4) == _UINT32_SIZE) #", "== header_pickled_size) # header header 2: return of the length prefixes header_unpadded_size =", "r.executable else 0o644 out.addfile(info, f) if __name__ == '__main__': import sys with tarfile.open(fileobj=sys.stdout.buffer,", "int(raw_offset) _expect(offset >= 0) _expect(raw_size >= 0) yield FileRecord(path + (key,), offset, raw_size,", "_flatten(path + (key,), value['files']) else: raw_offset = value['offset'] raw_size = value['size'] raw_executable =", "result = f.read(count) if len(result) != count: raise ValueError('Unexpected end of Asar file')", "r.offset + r.size info = tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size info.mode = 0o755 if", "# header header _expect(_read_uint4_le(f) == header_pickled_size) # header header 2: return of the", "header_bytes = _read_exact(f, header_unpadded_size) header = json.loads(header_bytes) _read_padding(f, padding_size) offset = 0 for", "header_unpadded_size = _read_uint4_le(f) padding_size = header_pickled_size - 4 - header_unpadded_size header_bytes = _read_exact(f,", "tarfile from typing import NamedTuple _UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path: str offset:", "= value['offset'] raw_size = value['size'] raw_executable = value.get('executable', False) _expect(isinstance(raw_offset, str)) 
_expect(isinstance(raw_size, int))", "return of the length prefixes header_unpadded_size = _read_uint4_le(f) padding_size = header_pickled_size - 4", "4), 'little') def _read_padding(f, size): _expect(0 <= size < 4) if size !=", "info.size = r.size info.mode = 0o755 if r.executable else 0o644 out.addfile(info, f) if", "_expect(offset >= 0) _expect(raw_size >= 0) yield FileRecord(path + (key,), offset, raw_size, raw_executable)", "class FileRecord(NamedTuple): path: str offset: int size: int executable: bool def _read_exact(f, count):", "!= count: raise ValueError('Unexpected end of Asar file') return result def _expect(expectation): if", "def transform(f, out): # header_size header _expect(f.read(4) == _UINT32_SIZE) # header_size header_pickled_size =", "2: return of the length prefixes header_unpadded_size = _read_uint4_le(f) padding_size = header_pickled_size -", "def _read_uint4_le(f): return int.from_bytes(_read_exact(f, 4), 'little') def _read_padding(f, size): _expect(0 <= size <", "end of Asar file') return result def _expect(expectation): if not expectation: raise ValueError('Unexpected", "= value['size'] raw_executable = value.get('executable', False) _expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool)) offset", "key=lambda r: r.offset): _expect(offset == r.offset) offset = r.offset + r.size info =", "for key, value in index.items(): _expect(key and '/' not in key and key", "header 2: return of the length prefixes header_unpadded_size = _read_uint4_le(f) padding_size = header_pickled_size", "0) yield FileRecord(path + (key,), offset, raw_size, raw_executable) def transform(f, out): # header_size", "FileRecord(path + (key,), offset, raw_size, raw_executable) def transform(f, out): # header_size header _expect(f.read(4)", "data in Asar file') def _read_uint4_le(f): return int.from_bytes(_read_exact(f, 4), 'little') def _read_padding(f, size):", "raise ValueError('Unexpected data in Asar file') def 
_read_uint4_le(f): return int.from_bytes(_read_exact(f, 4), 'little') def", "value: yield from _flatten(path + (key,), value['files']) else: raw_offset = value['offset'] raw_size =", "raw_offset = value['offset'] raw_size = value['size'] raw_executable = value.get('executable', False) _expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size,", "prefixes header_unpadded_size = _read_uint4_le(f) padding_size = header_pickled_size - 4 - header_unpadded_size header_bytes =", "int executable: bool def _read_exact(f, count): result = f.read(count) if len(result) != count:", "from typing import NamedTuple _UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path: str offset: int", "header_pickled_size - 4 - header_unpadded_size header_bytes = _read_exact(f, header_unpadded_size) header = json.loads(header_bytes) _read_padding(f,", "_expect(f.read(size) == b'\\0' * size) def _flatten(path, index): for key, value in index.items():", "= 0o755 if r.executable else 0o644 out.addfile(info, f) if __name__ == '__main__': import", "= json.loads(header_bytes) _read_padding(f, padding_size) offset = 0 for r in sorted(_flatten((), header['files']), key=lambda", "result def _expect(expectation): if not expectation: raise ValueError('Unexpected data in Asar file') def", "0o644 out.addfile(info, f) if __name__ == '__main__': import sys with tarfile.open(fileobj=sys.stdout.buffer, mode='w|') as", "0o755 if r.executable else 0o644 out.addfile(info, f) if __name__ == '__main__': import sys", "str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool)) offset = int(raw_offset) _expect(offset >= 0) _expect(raw_size >=", "and '/' not in key and key not in {'.', '..'}) if 'files'", "r.size info = tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size info.mode = 0o755 if r.executable else", "typing import NamedTuple _UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path: str offset: int size:", "out.addfile(info, f) if 
__name__ == '__main__': import sys with tarfile.open(fileobj=sys.stdout.buffer, mode='w|') as out:", "# header_size header_pickled_size = _read_uint4_le(f) - 4 # header header _expect(_read_uint4_le(f) == header_pickled_size)", "not in {'.', '..'}) if 'files' in value: yield from _flatten(path + (key,),", "raw_size = value['size'] raw_executable = value.get('executable', False) _expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool))", "if len(result) != count: raise ValueError('Unexpected end of Asar file') return result def", "value in index.items(): _expect(key and '/' not in key and key not in", "_expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool)) offset = int(raw_offset) _expect(offset >= 0) _expect(raw_size >= 0)", "'..'}) if 'files' in value: yield from _flatten(path + (key,), value['files']) else: raw_offset", "(key,), value['files']) else: raw_offset = value['offset'] raw_size = value['size'] raw_executable = value.get('executable', False)", "r in sorted(_flatten((), header['files']), key=lambda r: r.offset): _expect(offset == r.offset) offset = r.offset", "_expect(offset == r.offset) offset = r.offset + r.size info = tarfile.TarInfo(name='/'.join(r.path)) info.size =", "'/' not in key and key not in {'.', '..'}) if 'files' in", "executable: bool def _read_exact(f, count): result = f.read(count) if len(result) != count: raise", "in sorted(_flatten((), header['files']), key=lambda r: r.offset): _expect(offset == r.offset) offset = r.offset +", "- 4 - header_unpadded_size header_bytes = _read_exact(f, header_unpadded_size) header = json.loads(header_bytes) _read_padding(f, padding_size)", "ValueError('Unexpected end of Asar file') return result def _expect(expectation): if not expectation: raise", "4 - header_unpadded_size header_bytes = _read_exact(f, header_unpadded_size) header = json.loads(header_bytes) _read_padding(f, padding_size) offset", "<gh_stars>1-10 import json 
import tarfile from typing import NamedTuple _UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class", "in value: yield from _flatten(path + (key,), value['files']) else: raw_offset = value['offset'] raw_size", "r: r.offset): _expect(offset == r.offset) offset = r.offset + r.size info = tarfile.TarInfo(name='/'.join(r.path))", "in Asar file') def _read_uint4_le(f): return int.from_bytes(_read_exact(f, 4), 'little') def _read_padding(f, size): _expect(0", "_expect(_read_uint4_le(f) == header_pickled_size) # header header 2: return of the length prefixes header_unpadded_size", "+ (key,), offset, raw_size, raw_executable) def transform(f, out): # header_size header _expect(f.read(4) ==", "== b'\\0' * size) def _flatten(path, index): for key, value in index.items(): _expect(key", "_read_uint4_le(f): return int.from_bytes(_read_exact(f, 4), 'little') def _read_padding(f, size): _expect(0 <= size < 4)", "== r.offset) offset = r.offset + r.size info = tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size", "0) _expect(raw_size >= 0) yield FileRecord(path + (key,), offset, raw_size, raw_executable) def transform(f,", "value.get('executable', False) _expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool)) offset = int(raw_offset) _expect(offset >=", "= int(raw_offset) _expect(offset >= 0) _expect(raw_size >= 0) yield FileRecord(path + (key,), offset,", "+ (key,), value['files']) else: raw_offset = value['offset'] raw_size = value['size'] raw_executable = value.get('executable',", "transform(f, out): # header_size header _expect(f.read(4) == _UINT32_SIZE) # header_size header_pickled_size = _read_uint4_le(f)", "_read_uint4_le(f) padding_size = header_pickled_size - 4 - header_unpadded_size header_bytes = _read_exact(f, header_unpadded_size) header", "def _expect(expectation): if not expectation: raise ValueError('Unexpected data in Asar file') def _read_uint4_le(f):", ">= 0) yield FileRecord(path + (key,), offset, raw_size, 
raw_executable) def transform(f, out): #", "tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size info.mode = 0o755 if r.executable else 0o644 out.addfile(info, f)", ">= 0) _expect(raw_size >= 0) yield FileRecord(path + (key,), offset, raw_size, raw_executable) def", "# header_size header _expect(f.read(4) == _UINT32_SIZE) # header_size header_pickled_size = _read_uint4_le(f) - 4", "offset: int size: int executable: bool def _read_exact(f, count): result = f.read(count) if", "not expectation: raise ValueError('Unexpected data in Asar file') def _read_uint4_le(f): return int.from_bytes(_read_exact(f, 4),", "_expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool)) offset = int(raw_offset) _expect(offset >= 0) _expect(raw_size", "_read_exact(f, count): result = f.read(count) if len(result) != count: raise ValueError('Unexpected end of", "f.read(count) if len(result) != count: raise ValueError('Unexpected end of Asar file') return result", "if 'files' in value: yield from _flatten(path + (key,), value['files']) else: raw_offset =", "in index.items(): _expect(key and '/' not in key and key not in {'.',", "- 4 # header header _expect(_read_uint4_le(f) == header_pickled_size) # header header 2: return", "length prefixes header_unpadded_size = _read_uint4_le(f) padding_size = header_pickled_size - 4 - header_unpadded_size header_bytes", "json.loads(header_bytes) _read_padding(f, padding_size) offset = 0 for r in sorted(_flatten((), header['files']), key=lambda r:", "r.offset) offset = r.offset + r.size info = tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size info.mode", "def _read_padding(f, size): _expect(0 <= size < 4) if size != 0: _expect(f.read(size)", "out): # header_size header _expect(f.read(4) == _UINT32_SIZE) # header_size header_pickled_size = _read_uint4_le(f) -", "_read_padding(f, size): _expect(0 <= size < 4) if size != 0: _expect(f.read(size) ==", "raw_size, raw_executable) def transform(f, 
out): # header_size header _expect(f.read(4) == _UINT32_SIZE) # header_size", "size: int executable: bool def _read_exact(f, count): result = f.read(count) if len(result) !=", "= value.get('executable', False) _expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool)) offset = int(raw_offset) _expect(offset", "and key not in {'.', '..'}) if 'files' in value: yield from _flatten(path", "for r in sorted(_flatten((), header['files']), key=lambda r: r.offset): _expect(offset == r.offset) offset =", "f) if __name__ == '__main__': import sys with tarfile.open(fileobj=sys.stdout.buffer, mode='w|') as out: transform(sys.stdin.buffer,", "size) def _flatten(path, index): for key, value in index.items(): _expect(key and '/' not", "Asar file') def _read_uint4_le(f): return int.from_bytes(_read_exact(f, 4), 'little') def _read_padding(f, size): _expect(0 <=", "in {'.', '..'}) if 'files' in value: yield from _flatten(path + (key,), value['files'])", "info.mode = 0o755 if r.executable else 0o644 out.addfile(info, f) if __name__ == '__main__':", "FileRecord(NamedTuple): path: str offset: int size: int executable: bool def _read_exact(f, count): result", "int size: int executable: bool def _read_exact(f, count): result = f.read(count) if len(result)", "import tarfile from typing import NamedTuple _UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path: str", "{'.', '..'}) if 'files' in value: yield from _flatten(path + (key,), value['files']) else:", "len(result) != count: raise ValueError('Unexpected end of Asar file') return result def _expect(expectation):", "header['files']), key=lambda r: r.offset): _expect(offset == r.offset) offset = r.offset + r.size info", "Asar file') return result def _expect(expectation): if not expectation: raise ValueError('Unexpected data in", "= _read_uint4_le(f) padding_size = header_pickled_size - 4 - header_unpadded_size header_bytes = _read_exact(f, header_unpadded_size)", 
"= tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size info.mode = 0o755 if r.executable else 0o644 out.addfile(info,", "yield from _flatten(path + (key,), value['files']) else: raw_offset = value['offset'] raw_size = value['size']", "header_pickled_size = _read_uint4_le(f) - 4 # header header _expect(_read_uint4_le(f) == header_pickled_size) # header", "_expect(f.read(4) == _UINT32_SIZE) # header_size header_pickled_size = _read_uint4_le(f) - 4 # header header", "file') return result def _expect(expectation): if not expectation: raise ValueError('Unexpected data in Asar", "_read_exact(f, header_unpadded_size) header = json.loads(header_bytes) _read_padding(f, padding_size) offset = 0 for r in", "bool)) offset = int(raw_offset) _expect(offset >= 0) _expect(raw_size >= 0) yield FileRecord(path +", "of the length prefixes header_unpadded_size = _read_uint4_le(f) padding_size = header_pickled_size - 4 -", "in key and key not in {'.', '..'}) if 'files' in value: yield", "4) if size != 0: _expect(f.read(size) == b'\\0' * size) def _flatten(path, index):", "raw_executable = value.get('executable', False) _expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool)) offset = int(raw_offset)", "= 0 for r in sorted(_flatten((), header['files']), key=lambda r: r.offset): _expect(offset == r.offset)", "(key,), offset, raw_size, raw_executable) def transform(f, out): # header_size header _expect(f.read(4) == _UINT32_SIZE)", "= r.size info.mode = 0o755 if r.executable else 0o644 out.addfile(info, f) if __name__", "4 # header header _expect(_read_uint4_le(f) == header_pickled_size) # header header 2: return of", "index): for key, value in index.items(): _expect(key and '/' not in key and", "padding_size) offset = 0 for r in sorted(_flatten((), header['files']), key=lambda r: r.offset): _expect(offset", "padding_size = header_pickled_size - 4 - header_unpadded_size header_bytes = _read_exact(f, header_unpadded_size) header 
=", "offset = int(raw_offset) _expect(offset >= 0) _expect(raw_size >= 0) yield FileRecord(path + (key,),", "header_unpadded_size header_bytes = _read_exact(f, header_unpadded_size) header = json.loads(header_bytes) _read_padding(f, padding_size) offset = 0", "== _UINT32_SIZE) # header_size header_pickled_size = _read_uint4_le(f) - 4 # header header _expect(_read_uint4_le(f)", "# header header 2: return of the length prefixes header_unpadded_size = _read_uint4_le(f) padding_size", "_expect(key and '/' not in key and key not in {'.', '..'}) if", "if __name__ == '__main__': import sys with tarfile.open(fileobj=sys.stdout.buffer, mode='w|') as out: transform(sys.stdin.buffer, out)", "of Asar file') return result def _expect(expectation): if not expectation: raise ValueError('Unexpected data", "0 for r in sorted(_flatten((), header['files']), key=lambda r: r.offset): _expect(offset == r.offset) offset", "if r.executable else 0o644 out.addfile(info, f) if __name__ == '__main__': import sys with", "r.size info.mode = 0o755 if r.executable else 0o644 out.addfile(info, f) if __name__ ==", "_flatten(path, index): for key, value in index.items(): _expect(key and '/' not in key", "key not in {'.', '..'}) if 'files' in value: yield from _flatten(path +", "= _read_uint4_le(f) - 4 # header header _expect(_read_uint4_le(f) == header_pickled_size) # header header", "int.from_bytes(_read_exact(f, 4), 'little') def _read_padding(f, size): _expect(0 <= size < 4) if size", "header = json.loads(header_bytes) _read_padding(f, padding_size) offset = 0 for r in sorted(_flatten((), header['files']),", "= r.offset + r.size info = tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size info.mode = 0o755", "- header_unpadded_size header_bytes = _read_exact(f, header_unpadded_size) header = json.loads(header_bytes) _read_padding(f, padding_size) offset =", "'little') def _read_padding(f, size): _expect(0 <= size < 4) if size != 0:", "def _read_exact(f, count): result = f.read(count) if 
len(result) != count: raise ValueError('Unexpected end", "header header _expect(_read_uint4_le(f) == header_pickled_size) # header header 2: return of the length", "if size != 0: _expect(f.read(size) == b'\\0' * size) def _flatten(path, index): for", "offset = 0 for r in sorted(_flatten((), header['files']), key=lambda r: r.offset): _expect(offset ==", "False) _expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable, bool)) offset = int(raw_offset) _expect(offset >= 0)", "header_unpadded_size) header = json.loads(header_bytes) _read_padding(f, padding_size) offset = 0 for r in sorted(_flatten((),", "_expect(raw_size >= 0) yield FileRecord(path + (key,), offset, raw_size, raw_executable) def transform(f, out):", "count): result = f.read(count) if len(result) != count: raise ValueError('Unexpected end of Asar", "count: raise ValueError('Unexpected end of Asar file') return result def _expect(expectation): if not", "sorted(_flatten((), header['files']), key=lambda r: r.offset): _expect(offset == r.offset) offset = r.offset + r.size", "NamedTuple _UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path: str offset: int size: int executable:", "* size) def _flatten(path, index): for key, value in index.items(): _expect(key and '/'", "header _expect(_read_uint4_le(f) == header_pickled_size) # header header 2: return of the length prefixes", "<= size < 4) if size != 0: _expect(f.read(size) == b'\\0' * size)", "offset = r.offset + r.size info = tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size info.mode =", "file') def _read_uint4_le(f): return int.from_bytes(_read_exact(f, 4), 'little') def _read_padding(f, size): _expect(0 <= size", "return int.from_bytes(_read_exact(f, 4), 'little') def _read_padding(f, size): _expect(0 <= size < 4) if", "value['files']) else: raw_offset = value['offset'] raw_size = value['size'] raw_executable = value.get('executable', False) _expect(isinstance(raw_offset,", 
"path: str offset: int size: int executable: bool def _read_exact(f, count): result =", "= f.read(count) if len(result) != count: raise ValueError('Unexpected end of Asar file') return", "return result def _expect(expectation): if not expectation: raise ValueError('Unexpected data in Asar file')", "b'\\0' * size) def _flatten(path, index): for key, value in index.items(): _expect(key and", "value['offset'] raw_size = value['size'] raw_executable = value.get('executable', False) _expect(isinstance(raw_offset, str)) _expect(isinstance(raw_size, int)) _expect(isinstance(raw_executable,", "_read_padding(f, padding_size) offset = 0 for r in sorted(_flatten((), header['files']), key=lambda r: r.offset):", "'files' in value: yield from _flatten(path + (key,), value['files']) else: raw_offset = value['offset']", "expectation: raise ValueError('Unexpected data in Asar file') def _read_uint4_le(f): return int.from_bytes(_read_exact(f, 4), 'little')", "json import tarfile from typing import NamedTuple _UINT32_SIZE = b'\\x04\\x00\\x00\\x00' class FileRecord(NamedTuple): path:", "r.offset): _expect(offset == r.offset) offset = r.offset + r.size info = tarfile.TarInfo(name='/'.join(r.path)) info.size", "info = tarfile.TarInfo(name='/'.join(r.path)) info.size = r.size info.mode = 0o755 if r.executable else 0o644", "_read_uint4_le(f) - 4 # header header _expect(_read_uint4_le(f) == header_pickled_size) # header header 2:" ]
[ "'<KEY> # SECURITY WARNING: don't run with debug turned on in production! DEBUG", "STATIC_ROOT = os.path.join(BASE_DIR, \"static\") # build static out STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'frontend',", "https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER': 'root', #数据库用户名", "on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their", "Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING:", "'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀 } # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans' TIME_ZONE", "'<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1', 'PORT': '5432', #数据库远程连接端口 } } # Password validation #", "EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT = 587 EMAIL_HOST_USER", "# Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, {", "REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ),", "'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'coole.urls' TEMPLATES =", "'/'), ] MEDIA_URL = 
'/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//', '/') EMAIL_BACKEND", "(CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, \"static\") #", "'coole.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool',", "coole project. Generated by 'django-admin startproject' using Django 1.11.4. For more information on", "'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'coole.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'],", "'profile').replace('//', '/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT =", "= True EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT = 587 EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD =", "# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', },", "https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY", "{ 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, {", "'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1', 'PORT': '5432', #数据库远程连接端口 } } # Password validation", "on in production! 
DEBUG = True ALLOWED_HOSTS = ['127.0.0.1', 'localhost ', '.gaonengyujing.com'] HOST", "( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH = {", "AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES':", "this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for", "'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER': 'root', #数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1', 'PORT': '5432',", "files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, \"static\")", "\"\"\" import os import datetime # Build paths inside the project like this:", "out STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'), ] MEDIA_URL = '/frontend/static/profile/' MEDIA_ROOT", "unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key", "}, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend',", "'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER':", "Django settings for coole project. Generated by 'django-admin startproject' using Django 1.11.4. 
For", "( 'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True,", "= os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//', '/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST", "list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os import datetime", "USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) #", "JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL',", "] WSGI_APPLICATION = 'coole.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': {", "= 'account.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated',", "Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER':", "1.11.4. 
For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list", "[ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'coole.wsgi.application' #", "] MEDIA_URL = '/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//', '/') EMAIL_BACKEND =", "EMAIL_USE_TLS = True EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT = 587 EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD", "Django 1.11.4. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full", "secret! SECRET_KEY = '<KEY> # SECURITY WARNING: don't run with debug turned on", "'.gaonengyujing.com'] HOST = '127.0.0.1:3000' # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes',", "\"\"\" Django settings for coole project. 
Generated by 'django-admin startproject' using Django 1.11.4.", "# Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True", "see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/", "'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL =", "settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the", "this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values,", "'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ) }", "#数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1', 'PORT': '5432', #数据库远程连接端口 } } # Password", "'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account', 'care' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware',", "True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL", "( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', )", "using 
Django 1.11.4. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the", "their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os import datetime # Build paths inside", "}, }, ] WSGI_APPLICATION = 'coole.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = {", "= True ALLOWED_HOSTS = ['127.0.0.1', 'localhost ', '.gaonengyujing.com'] HOST = '127.0.0.1:3000' # Application", "'root', #数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1', 'PORT': '5432', #数据库远程连接端口 } } #", "[ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account', 'care' ] MIDDLEWARE =", "= 'zh-Hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ =", "SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '<KEY>", "key used in production secret! SECRET_KEY = '<KEY> # SECURITY WARNING: don't run", "'django-admin startproject' using Django 1.11.4. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/", "'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH = { 'JWT_EXPIRATION_DELTA':", "'/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT = 587", "# Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY", "True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images)", "keep the secret key used in production secret! 
SECRET_KEY = '<KEY> # SECURITY", "'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'coole.wsgi.application'", "#数据库用户名密码 'HOST': '127.0.0.1', 'PORT': '5432', #数据库远程连接端口 } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators", "datetime # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR =", "like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable", "# https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N =", "development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep", "values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os import datetime # Build paths inside the", "the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os", "= 'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ = True # Static", "= { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER': 'root', #数据库用户名 'PASSWORD': '<PASSWORD>',", "of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os import datetime #", "project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings -", "project. Generated by 'django-admin startproject' using Django 1.11.4. 
For more information on this", "# Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework',", "] AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES':", "= ( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': (", "'localhost ', '.gaonengyujing.com'] HOST = '127.0.0.1:3000' # Application definition INSTALLED_APPS = [ 'django.contrib.admin',", "production! DEBUG = True ALLOWED_HOSTS = ['127.0.0.1', 'localhost ', '.gaonengyujing.com'] HOST = '127.0.0.1:3000'", "] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF", "WSGI_APPLICATION = 'coole.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE':", ") REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt", "True EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT = 587 EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = ''", "'HOST': '127.0.0.1', 'PORT': '5432', #数据库远程连接端口 } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS", "DATABASES = { 'default': { 'ENGINE': 
'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER': 'root', #数据库用户名 'PASSWORD':", "'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'coole.wsgi.application' # Database #", "[ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', },", "'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'coole.urls' TEMPLATES = [ {", "inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development", "JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, \"static\") # build", "'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS =", "= True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/'", "'127.0.0.1:3000' # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles',", "ALLOWED_HOSTS = ['127.0.0.1', 'localhost ', '.gaonengyujing.com'] HOST = '127.0.0.1:3000' # Application definition INSTALLED_APPS", "{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': 
['frontend/dist'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request',", "{ 'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER': 'root', #数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1',", "in production secret! SECRET_KEY = '<KEY> # SECURITY WARNING: don't run with debug", "['frontend/dist'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], },", "= '127.0.0.1:3000' # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages',", "= [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account', 'care' ] MIDDLEWARE", "# https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, \"static\") # build static out", "STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, \"static\") # build static out STATICFILES_DIRS =", "= True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/", "WARNING: keep the secret key used in production secret! 
SECRET_KEY = '<KEY> #", "#数据库远程连接端口 } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME':", "= 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT = 587 EMAIL_HOST_USER =", "LANGUAGE_CODE = 'zh-Hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ", "'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ = True # Static files", "'zh-Hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ = True", "'frontend', 'dist').replace('//', '/'), ] MEDIA_URL = '/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//',", "https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os import datetime # Build paths inside the project like", "'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀 } # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/", "'DIRS': ['frontend/dist'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ],", "turned on in production! DEBUG = True ALLOWED_HOSTS = ['127.0.0.1', 'localhost ', '.gaonengyujing.com']", "Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR,", "the project like this: os.path.join(BASE_DIR, ...) 
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings", "https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, \"static\") # build static out STATICFILES_DIRS", "datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀 } # Internationalization #", "'account', 'care' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware',", "'/static/' STATIC_ROOT = os.path.join(BASE_DIR, \"static\") # build static out STATICFILES_DIRS = [ os.path.join(BASE_DIR,", "os import datetime # Build paths inside the project like this: os.path.join(BASE_DIR, ...)", "] ROOT_URLCONF = 'coole.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'], 'APP_DIRS':", "https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N = True", "WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS", "with debug turned on in production! DEBUG = True ALLOWED_HOSTS = ['127.0.0.1', 'localhost", "don't run with debug turned on in production! 
DEBUG = True ALLOWED_HOSTS =", "'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK =", "Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N", "= os.path.join(BASE_DIR, \"static\") # build static out STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//',", "['127.0.0.1', 'localhost ', '.gaonengyujing.com'] HOST = '127.0.0.1:3000' # Application definition INSTALLED_APPS = [", "], }, }, ] WSGI_APPLICATION = 'coole.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES =", "}, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL = 'account.User'", "{ 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀", "[ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'coole.urls' TEMPLATES", "# django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30),", "= [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 
'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'coole.urls'", "'USER': 'root', #数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1', 'PORT': '5432', #数据库远程连接端口 } }", "'care' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ]", "static out STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'), ] MEDIA_URL = '/frontend/static/profile/'", "INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account', 'care' ]", "TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ = True #", "[ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug',", "'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', #", "), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH", "import os import datetime # Build paths inside the project like this: 
os.path.join(BASE_DIR,", "'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, },", "'5432', #数据库远程连接端口 } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ {", "= 'smtp-mail.outlook.com' EMAIL_PORT = 587 EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' DEFAULT_FROM_EMAIL =", "'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'coole.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES", "the secret key used in production secret! SECRET_KEY = '<KEY> # SECURITY WARNING:", "{ 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER': 'root', #数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码", "}, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', },", "'NAME':'cool', #数据库名 'USER': 'root', #数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1', 'PORT': '5432', #数据库远程连接端口", "} } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',", "= os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/", "run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = ['127.0.0.1',", "Generated by 'django-admin startproject' using Django 1.11.4. 
For more information on this file,", "= [ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'), ] MEDIA_URL = '/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR,", "'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'coole.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS':", "by 'django-admin startproject' using Django 1.11.4. For more information on this file, see", "Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME':", "# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production", "'django.contrib.staticfiles', 'rest_framework', 'account', 'care' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware',", "= [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [", "'127.0.0.1', 'PORT': '5432', #数据库远程连接端口 } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS =", "os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ #", "{ 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', )", "'NAME': 
'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK", "{ 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS", "- unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret", "settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os import datetime # Build", "more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings", "', '.gaonengyujing.com'] HOST = '127.0.0.1:3000' # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth',", "paths inside the project like this: os.path.join(BASE_DIR, ...) 
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start", "\"static\") # build static out STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'), ]", "'smtp-mail.outlook.com' EMAIL_PORT = 587 EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' DEFAULT_FROM_EMAIL = 'cool'", "{ 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ]", "AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': (", "datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀 } #", "} # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', },", "True ALLOWED_HOSTS = ['127.0.0.1', 'localhost ', '.gaonengyujing.com'] HOST = '127.0.0.1:3000' # Application definition", "ROOT_URLCONF = 'coole.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'], 'APP_DIRS': True,", "STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'), ] MEDIA_URL = '/frontend/static/profile/' MEDIA_ROOT =", "os.path.join(BASE_DIR, \"static\") # build static out STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'),", "{ 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION =", 
"'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀 }", "'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀 } # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans'", "'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER': 'root', #数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1', 'PORT':", "startproject' using Django 1.11.4. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For", "# build static out STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'), ] MEDIA_URL", "file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see", "Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) #", "Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account',", "import datetime # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
BASE_DIR", "( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication',", "'coole.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'], 'APP_DIRS': True, 'OPTIONS': {", "True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀 } # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE", "EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT = 587 EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' DEFAULT_FROM_EMAIL", "'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER': 'root', #数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST':", "'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'coole.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates',", "USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS,", "DEBUG = True ALLOWED_HOSTS = ['127.0.0.1', 'localhost ', '.gaonengyujing.com'] HOST = '127.0.0.1:3000' #", "USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL =", "see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os import datetime # Build paths inside the project", "See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret!", "'PORT': '5432', #数据库远程连接端口 } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [", "# https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = 
{ 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool', #数据库名 'USER': 'root',", "'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS = (", "= 'coole.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'], 'APP_DIRS': True, 'OPTIONS':", "}, ] WSGI_APPLICATION = 'coole.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default':", "HOST = '127.0.0.1:3000' # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions',", "'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀 } # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE =", "'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION", "'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH =", "= [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',", "), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7),", "for coole project. 
Generated by 'django-admin startproject' using Django 1.11.4. For more information", "} JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX':", "}, ] AUTH_USER_MODEL = 'account.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK = {", "True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT", "'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', # 请求头前缀 } # Internationalization", "'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account', 'care' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware',", "'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'coole.urls' TEMPLATES = [ { 'BACKEND':", "# SECURITY WARNING: don't run with debug turned on in production! 
DEBUG =", "definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account', 'care'", "https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, {", "= { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS':", "TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors':", "debug turned on in production! DEBUG = True ALLOWED_HOSTS = ['127.0.0.1', 'localhost ',", "build static out STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'), ] MEDIA_URL =", "os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//', '/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST =", "secret key used in production secret! SECRET_KEY = '<KEY> # SECURITY WARNING: don't", "SECRET_KEY = '<KEY> # SECURITY WARNING: don't run with debug turned on in", "= '/static/' STATIC_ROOT = os.path.join(BASE_DIR, \"static\") # build static out STATICFILES_DIRS = [", "= '/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//', '/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS", "settings for coole project. Generated by 'django-admin startproject' using Django 1.11.4. For more", "os.path.join(BASE_DIR, ...) 
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production", "django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA':", "in production! DEBUG = True ALLOWED_HOSTS = ['127.0.0.1', 'localhost ', '.gaonengyujing.com'] HOST =", "Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, \"static\") # build static", "[ os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'), ] MEDIA_URL = '/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend',", "used in production secret! SECRET_KEY = '<KEY> # SECURITY WARNING: don't run with", "= 'coole.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql',", "= ['127.0.0.1', 'localhost ', '.gaonengyujing.com'] HOST = '127.0.0.1:3000' # Application definition INSTALLED_APPS =", "full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os import", "# Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))", "For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of", "'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account', 'care' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware',", "'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend',", "BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See", "'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',", "= '<KEY> # SECURITY WARNING: don't run with debug turned on in production!", "= True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript,", "'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth',", "'dist').replace('//', '/'), ] MEDIA_URL = '/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//', '/')", "'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'coole.wsgi.application' # 
Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases", "{ 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', # django-rest-framework-jwt ), 'DEFAULT_FILTER_BACKENDS': (", "MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF =", "MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//', '/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True", "'rest_framework', 'account', 'care' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware',", "} # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N =", "MEDIA_URL = '/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//', '/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'", "True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ]", "'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT = 587 EMAIL_HOST_USER = ''", 
"https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\"", "# 请求头前缀 } # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans' TIME_ZONE = 'Asia/Shanghai'", "'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ) } JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH':", ") } JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response',", "os.path.join(BASE_DIR, 'frontend', 'dist').replace('//', '/'), ] MEDIA_URL = '/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend', 'static',", "#数据库名 'USER': 'root', #数据库用户名 'PASSWORD': '<PASSWORD>', #数据库用户名密码 'HOST': '127.0.0.1', 'PORT': '5432', #数据库远程连接端口 }", "production secret! SECRET_KEY = '<KEY> # SECURITY WARNING: don't run with debug turned", "information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and", "'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'coole.urls' TEMPLATES = [", "'/frontend/static/profile/' MEDIA_ROOT = os.path.join(BASE_DIR, 'frontend', 'static', 'profile').replace('//', '/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS =", "...) 
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production #", "and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import os import datetime # Build paths", "SECURITY WARNING: don't run with debug turned on in production! DEBUG = True", "'COOL', # 请求头前缀 } # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans' TIME_ZONE =", "production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in", "'account.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', ) REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ),", "# SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY =", "For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ \"\"\" import", "validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',", "'django.template.backends.django.DjangoTemplates', 'DIRS': ['frontend/dist'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages',", "AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME':", "for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used", "'NAME': 
'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME':", "'static', 'profile').replace('//', '/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST = 'smtp-mail.outlook.com' EMAIL_PORT", "'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'coole.wsgi.application' # Database", "'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account', 'care' ] MIDDLEWARE = [", "# Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME':'cool', #数据库名", "'frontend', 'static', 'profile').replace('//', '/') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST = 'smtp-mail.outlook.com'", "请求头前缀 } # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-Hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N", "'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'account', 'care' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware',", "= { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=30), 'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_ALLOW_REFRESH': True, 'JWT_RESPONSE_PAYLOAD_HANDLER': 'account.jwt.custom_jwt_response', 'JWT_AUTH_HEADER_PREFIX': 'COOL', #", "'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 
}, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL", "# Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT =" ]
[]
[ "validate_clusteringkeycolumn_orderby, ) class Keyspace(AWSObject): \"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Keyspace\" props: PropsDictType", "= { \"KeyspaceName\": (str, False), \"Tags\": (Tags, False), } class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput", "( validate_billingmode_mode, validate_clusteringkeycolumn_orderby, ) class Keyspace(AWSObject): \"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Keyspace\"", "license. # # *** Do not modify - this file is autogenerated ***", "`BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType = { \"Mode\": (validate_billingmode_mode, True), \"ProvisionedThroughput\": (ProvisionedThroughput, False),", "False), } class Column(AWSProperty): \"\"\" `Column <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-column.html>`__ \"\"\" props: PropsDictType = { \"ColumnName\":", "Keyspace(AWSObject): \"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Keyspace\" props: PropsDictType = { \"KeyspaceName\":", "\"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Keyspace\" props: PropsDictType = { \"KeyspaceName\": (str,", "False), \"DefaultTimeToLive\": (integer, False), \"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\": (str, True), \"PartitionKeyColumns\": ([Column], True),", "(str, True), \"PartitionKeyColumns\": ([Column], True), \"PointInTimeRecoveryEnabled\": (boolean, False), 
\"RegularColumns\": ([Column], False), \"TableName\": (str,", "<NAME> <<EMAIL>> # All rights reserved. # # See LICENSE file for full", "import ( validate_billingmode_mode, validate_clusteringkeycolumn_orderby, ) class Keyspace(AWSObject): \"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type =", "\"\"\" resource_type = \"AWS::Cassandra::Table\" props: PropsDictType = { \"BillingMode\": (BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn],", "(ProvisionedThroughput, False), } class Column(AWSProperty): \"\"\" `Column <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-column.html>`__ \"\"\" props: PropsDictType = {", "\"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType = { \"Mode\": (validate_billingmode_mode, True), \"ProvisionedThroughput\": (ProvisionedThroughput,", "AWSProperty, PropsDictType, Tags from .validators import boolean, integer from .validators.cassandra import ( validate_billingmode_mode,", "\"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\": (str, True), \"PartitionKeyColumns\": ([Column], True), \"PointInTimeRecoveryEnabled\": (boolean, False), \"RegularColumns\":", "import AWSObject, AWSProperty, PropsDictType, Tags from .validators import boolean, integer from .validators.cassandra import", "{ \"KeyspaceName\": (str, False), \"Tags\": (Tags, False), } class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__", "(Tags, False), } class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" 
props: PropsDictType = {", "\"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" props: PropsDictType = { \"ReadCapacityUnits\": (integer, True), \"WriteCapacityUnits\": (integer,", "} class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" props: PropsDictType = { \"ReadCapacityUnits\": (integer,", "= \"AWS::Cassandra::Keyspace\" props: PropsDictType = { \"KeyspaceName\": (str, False), \"Tags\": (Tags, False), }", "True), \"PointInTimeRecoveryEnabled\": (boolean, False), \"RegularColumns\": ([Column], False), \"TableName\": (str, False), \"Tags\": (Tags, False),", "False), } class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType = { \"EncryptionType\":", "not modify - this file is autogenerated *** from . import AWSObject, AWSProperty,", "# See LICENSE file for full license. # # *** Do not modify", "props: PropsDictType = { \"EncryptionType\": (str, True), \"KmsKeyIdentifier\": (str, False), } class Table(AWSObject):", "class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType = { \"EncryptionType\": (str, True),", "# Copyright (c) 2012-2022, <NAME> <<EMAIL>> # All rights reserved. 
# # See", "} class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType = { \"EncryptionType\": (str,", "# # See LICENSE file for full license. # # *** Do not", "`Column <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-column.html>`__ \"\"\" props: PropsDictType = { \"ColumnName\": (str, True), \"ColumnType\": (str, True),", "True), } class ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\" props: PropsDictType = { \"Column\":", "True), \"WriteCapacityUnits\": (integer, True), } class BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType", "= { \"Mode\": (validate_billingmode_mode, True), \"ProvisionedThroughput\": (ProvisionedThroughput, False), } class Column(AWSProperty): \"\"\" `Column", "<http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType = { \"Mode\": (validate_billingmode_mode, True), \"ProvisionedThroughput\": (ProvisionedThroughput, False), }", "`Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Keyspace\" props: PropsDictType = { \"KeyspaceName\": (str, False),", "} class ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\" props: PropsDictType = { \"Column\": (Column,", "PropsDictType = { \"ColumnName\": (str, True), \"ColumnType\": (str, 
True), } class ClusteringKeyColumn(AWSProperty): \"\"\"", "BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType = { \"Mode\": (validate_billingmode_mode, True), \"ProvisionedThroughput\":", "(str, True), \"KmsKeyIdentifier\": (str, False), } class Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type", "# *** Do not modify - this file is autogenerated *** from .", "class Column(AWSProperty): \"\"\" `Column <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-column.html>`__ \"\"\" props: PropsDictType = { \"ColumnName\": (str, True),", "\"Mode\": (validate_billingmode_mode, True), \"ProvisionedThroughput\": (ProvisionedThroughput, False), } class Column(AWSProperty): \"\"\" `Column <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-column.html>`__ \"\"\"", "\"\"\" props: PropsDictType = { \"ColumnName\": (str, True), \"ColumnType\": (str, True), } class", "<http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-column.html>`__ \"\"\" props: PropsDictType = { \"ColumnName\": (str, True), \"ColumnType\": (str, True), }", "`Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Table\" props: PropsDictType = { \"BillingMode\": (BillingMode, False),", "= { \"ColumnName\": (str, True), \"ColumnType\": (str, True), } class ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn", "True), \"KmsKeyIdentifier\": (str, False), } class Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type =", "(c) 2012-2022, <NAME> <<EMAIL>> # 
All rights reserved. # # See LICENSE file", "\"\"\" props: PropsDictType = { \"EncryptionType\": (str, True), \"KmsKeyIdentifier\": (str, False), } class", ".validators.cassandra import ( validate_billingmode_mode, validate_clusteringkeycolumn_orderby, ) class Keyspace(AWSObject): \"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type", "<http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Keyspace\" props: PropsDictType = { \"KeyspaceName\": (str, False), \"Tags\":", "([Column], True), \"PointInTimeRecoveryEnabled\": (boolean, False), \"RegularColumns\": ([Column], False), \"TableName\": (str, False), \"Tags\": (Tags,", "autogenerated *** from . import AWSObject, AWSProperty, PropsDictType, Tags from .validators import boolean,", "<http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType = { \"EncryptionType\": (str, True), \"KmsKeyIdentifier\": (str, False), }", "\"\"\" props: PropsDictType = { \"Mode\": (validate_billingmode_mode, True), \"ProvisionedThroughput\": (ProvisionedThroughput, False), } class", "PropsDictType = { \"ReadCapacityUnits\": (integer, True), \"WriteCapacityUnits\": (integer, True), } class BillingMode(AWSProperty): \"\"\"", "(integer, False), \"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\": (str, True), \"PartitionKeyColumns\": ([Column], True), \"PointInTimeRecoveryEnabled\": (boolean,", "`EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType = { \"EncryptionType\": (str, True), \"KmsKeyIdentifier\": (str, False),", "resource_type = \"AWS::Cassandra::Keyspace\" props: PropsDictType = { \"KeyspaceName\": (str, 
False), \"Tags\": (Tags, False),", "full license. # # *** Do not modify - this file is autogenerated", "integer from .validators.cassandra import ( validate_billingmode_mode, validate_clusteringkeycolumn_orderby, ) class Keyspace(AWSObject): \"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__", "(Column, True), \"OrderBy\": (validate_clusteringkeycolumn_orderby, False), } class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props:", "*** Do not modify - this file is autogenerated *** from . import", "\"\"\" props: PropsDictType = { \"Column\": (Column, True), \"OrderBy\": (validate_clusteringkeycolumn_orderby, False), } class", "\"BillingMode\": (BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer, False), \"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\":", "for full license. # # *** Do not modify - this file is", "Do not modify - this file is autogenerated *** from . 
import AWSObject,", "`ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" props: PropsDictType = { \"ReadCapacityUnits\": (integer, True), \"WriteCapacityUnits\": (integer, True),", "props: PropsDictType = { \"BillingMode\": (BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer, False),", "class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" props: PropsDictType = { \"ReadCapacityUnits\": (integer, True),", "= { \"BillingMode\": (BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer, False), \"EncryptionSpecification\": (EncryptionSpecification,", "\"PointInTimeRecoveryEnabled\": (boolean, False), \"RegularColumns\": ([Column], False), \"TableName\": (str, False), \"Tags\": (Tags, False), }", "\"KeyspaceName\": (str, True), \"PartitionKeyColumns\": ([Column], True), \"PointInTimeRecoveryEnabled\": (boolean, False), \"RegularColumns\": ([Column], False), \"TableName\":", "(str, True), \"ColumnType\": (str, True), } class ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\" props:", "class Keyspace(AWSObject): \"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Keyspace\" props: PropsDictType = {", "False), } class Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Table\" props: PropsDictType", "\"DefaultTimeToLive\": (integer, False), 
\"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\": (str, True), \"PartitionKeyColumns\": ([Column], True), \"PointInTimeRecoveryEnabled\":", "- this file is autogenerated *** from . import AWSObject, AWSProperty, PropsDictType, Tags", "EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType = { \"EncryptionType\": (str, True), \"KmsKeyIdentifier\":", "props: PropsDictType = { \"Column\": (Column, True), \"OrderBy\": (validate_clusteringkeycolumn_orderby, False), } class EncryptionSpecification(AWSProperty):", "\"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType = { \"EncryptionType\": (str, True), \"KmsKeyIdentifier\": (str,", "True), } class BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType = { \"Mode\":", "PropsDictType = { \"Column\": (Column, True), \"OrderBy\": (validate_clusteringkeycolumn_orderby, False), } class EncryptionSpecification(AWSProperty): \"\"\"", "`ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\" props: PropsDictType = { \"Column\": (Column, True), \"OrderBy\": (validate_clusteringkeycolumn_orderby, False),", "from . 
import AWSObject, AWSProperty, PropsDictType, Tags from .validators import boolean, integer from", "props: PropsDictType = { \"KeyspaceName\": (str, False), \"Tags\": (Tags, False), } class ProvisionedThroughput(AWSProperty):", "([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer, False), \"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\": (str, True), \"PartitionKeyColumns\": ([Column],", "\"WriteCapacityUnits\": (integer, True), } class BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType =", "(str, False), } class Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Table\" props:", "file is autogenerated *** from . import AWSObject, AWSProperty, PropsDictType, Tags from .validators", "{ \"ReadCapacityUnits\": (integer, True), \"WriteCapacityUnits\": (integer, True), } class BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__", "from .validators.cassandra import ( validate_billingmode_mode, validate_clusteringkeycolumn_orderby, ) class Keyspace(AWSObject): \"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\"", "\"ProvisionedThroughput\": (ProvisionedThroughput, False), } class Column(AWSProperty): \"\"\" `Column <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-column.html>`__ \"\"\" props: PropsDictType =", "Copyright (c) 2012-2022, <NAME> <<EMAIL>> # All rights reserved. # # See LICENSE", "<filename>troposphere/cassandra.py # Copyright (c) 2012-2022, <NAME> <<EMAIL>> # All rights reserved. 
# #", "False), \"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\": (str, True), \"PartitionKeyColumns\": ([Column], True), \"PointInTimeRecoveryEnabled\": (boolean, False),", "PropsDictType = { \"EncryptionType\": (str, True), \"KmsKeyIdentifier\": (str, False), } class Table(AWSObject): \"\"\"", "\"ReadCapacityUnits\": (integer, True), \"WriteCapacityUnits\": (integer, True), } class BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\"", ". import AWSObject, AWSProperty, PropsDictType, Tags from .validators import boolean, integer from .validators.cassandra", "\"OrderBy\": (validate_clusteringkeycolumn_orderby, False), } class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType =", "= { \"EncryptionType\": (str, True), \"KmsKeyIdentifier\": (str, False), } class Table(AWSObject): \"\"\" `Table", "\"KmsKeyIdentifier\": (str, False), } class Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Table\"", "class BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType = { \"Mode\": (validate_billingmode_mode, True),", "is autogenerated *** from . 
import AWSObject, AWSProperty, PropsDictType, Tags from .validators import", "False), \"Tags\": (Tags, False), } class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" props: PropsDictType", "PropsDictType = { \"KeyspaceName\": (str, False), \"Tags\": (Tags, False), } class ProvisionedThroughput(AWSProperty): \"\"\"", "{ \"BillingMode\": (BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer, False), \"EncryptionSpecification\": (EncryptionSpecification, False),", "*** from . import AWSObject, AWSProperty, PropsDictType, Tags from .validators import boolean, integer", "ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" props: PropsDictType = { \"ReadCapacityUnits\": (integer, True), \"WriteCapacityUnits\":", "All rights reserved. # # See LICENSE file for full license. 
# #", "False), } class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" props: PropsDictType = { \"ReadCapacityUnits\":", "(validate_billingmode_mode, True), \"ProvisionedThroughput\": (ProvisionedThroughput, False), } class Column(AWSProperty): \"\"\" `Column <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-column.html>`__ \"\"\" props:", "(str, True), } class ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\" props: PropsDictType = {", "\"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\" props: PropsDictType = { \"Column\": (Column, True), \"OrderBy\": (validate_clusteringkeycolumn_orderby,", "\"ColumnType\": (str, True), } class ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\" props: PropsDictType =", "{ \"ColumnName\": (str, True), \"ColumnType\": (str, True), } class ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__", "= { \"ReadCapacityUnits\": (integer, True), \"WriteCapacityUnits\": (integer, True), } class BillingMode(AWSProperty): \"\"\" `BillingMode", "} class BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType = { \"Mode\": (validate_billingmode_mode,", "{ \"Mode\": (validate_billingmode_mode, True), 
\"ProvisionedThroughput\": (ProvisionedThroughput, False), } class Column(AWSProperty): \"\"\" `Column <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-column.html>`__", "PropsDictType = { \"BillingMode\": (BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer, False), \"EncryptionSpecification\":", "See LICENSE file for full license. # # *** Do not modify -", "from .validators import boolean, integer from .validators.cassandra import ( validate_billingmode_mode, validate_clusteringkeycolumn_orderby, ) class", "# # *** Do not modify - this file is autogenerated *** from", "\"\"\" resource_type = \"AWS::Cassandra::Keyspace\" props: PropsDictType = { \"KeyspaceName\": (str, False), \"Tags\": (Tags,", "ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\" props: PropsDictType = { \"Column\": (Column, True), \"OrderBy\":", "{ \"EncryptionType\": (str, True), \"KmsKeyIdentifier\": (str, False), } class Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__", "\"EncryptionType\": (str, True), \"KmsKeyIdentifier\": (str, False), } class Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\"", "= \"AWS::Cassandra::Table\" props: PropsDictType = { \"BillingMode\": (BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\":", "\"AWS::Cassandra::Keyspace\" props: PropsDictType = { \"KeyspaceName\": (str, False), \"Tags\": (Tags, False), } class", "(validate_clusteringkeycolumn_orderby, False), } class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification 
<http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType = {", "\"AWS::Cassandra::Table\" props: PropsDictType = { \"BillingMode\": (BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer,", "\"Column\": (Column, True), \"OrderBy\": (validate_clusteringkeycolumn_orderby, False), } class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\"", "class Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Table\" props: PropsDictType = {", "} class Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Table\" props: PropsDictType =", ".validators import boolean, integer from .validators.cassandra import ( validate_billingmode_mode, validate_clusteringkeycolumn_orderby, ) class Keyspace(AWSObject):", "{ \"Column\": (Column, True), \"OrderBy\": (validate_clusteringkeycolumn_orderby, False), } class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__", "(integer, True), } class BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props: PropsDictType = {", "class ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\" props: PropsDictType = { 
\"Column\": (Column, True),", "validate_billingmode_mode, validate_clusteringkeycolumn_orderby, ) class Keyspace(AWSObject): \"\"\" `Keyspace <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-keyspace.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Keyspace\" props:", "True), \"OrderBy\": (validate_clusteringkeycolumn_orderby, False), } class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-encryptionspecification.html>`__ \"\"\" props: PropsDictType", "\"PartitionKeyColumns\": ([Column], True), \"PointInTimeRecoveryEnabled\": (boolean, False), \"RegularColumns\": ([Column], False), \"TableName\": (str, False), \"Tags\":", "# All rights reserved. # # See LICENSE file for full license. #", "<<EMAIL>> # All rights reserved. # # See LICENSE file for full license.", "\"KeyspaceName\": (str, False), \"Tags\": (Tags, False), } class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\"", "modify - this file is autogenerated *** from . import AWSObject, AWSProperty, PropsDictType,", "file for full license. # # *** Do not modify - this file", "\"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer, False), \"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\": (str, True), \"PartitionKeyColumns\":", "this file is autogenerated *** from . 
class BillingMode(AWSProperty):
    """
    `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__
    """

    # Property name -> (expected type/validator, required?)
    props: PropsDictType = {
        "Mode": (validate_billingmode_mode, True),
        "ProvisionedThroughput": (ProvisionedThroughput, False),
    }
\"ColumnName\": (str, True), \"ColumnType\":", "\"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Table\" props: PropsDictType = { \"BillingMode\": (BillingMode,", "(BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer, False), \"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\": (str,", "resource_type = \"AWS::Cassandra::Table\" props: PropsDictType = { \"BillingMode\": (BillingMode, False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False),", "props: PropsDictType = { \"ColumnName\": (str, True), \"ColumnType\": (str, True), } class ClusteringKeyColumn(AWSProperty):", "reserved. # # See LICENSE file for full license. # # *** Do", "\"Tags\": (Tags, False), } class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" props: PropsDictType =", "\"ColumnName\": (str, True), \"ColumnType\": (str, True), } class ClusteringKeyColumn(AWSProperty): \"\"\" `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__ \"\"\"", "Table(AWSObject): \"\"\" `Table <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cassandra-table.html>`__ \"\"\" resource_type = \"AWS::Cassandra::Table\" props: PropsDictType = { \"BillingMode\":", "boolean, integer from .validators.cassandra import ( validate_billingmode_mode, validate_clusteringkeycolumn_orderby, ) class Keyspace(AWSObject): \"\"\" `Keyspace", "AWSObject, AWSProperty, PropsDictType, Tags from .validators import boolean, integer from .validators.cassandra import (", "} class Column(AWSProperty): \"\"\" `Column 
class ClusteringKeyColumn(AWSProperty):
    """
    `ClusteringKeyColumn <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-clusteringkeycolumn.html>`__
    """

    # Property name -> (expected type/validator, required?)
    props: PropsDictType = {
        "Column": (Column, True),
        "OrderBy": (validate_clusteringkeycolumn_orderby, False),
    }
# # See LICENSE file for", "Tags from .validators import boolean, integer from .validators.cassandra import ( validate_billingmode_mode, validate_clusteringkeycolumn_orderby, )", "(str, False), \"Tags\": (Tags, False), } class ProvisionedThroughput(AWSProperty): \"\"\" `ProvisionedThroughput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-provisionedthroughput.html>`__ \"\"\" props:", "True), \"PartitionKeyColumns\": ([Column], True), \"PointInTimeRecoveryEnabled\": (boolean, False), \"RegularColumns\": ([Column], False), \"TableName\": (str, False),", "(integer, True), \"WriteCapacityUnits\": (integer, True), } class BillingMode(AWSProperty): \"\"\" `BillingMode <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cassandra-table-billingmode.html>`__ \"\"\" props:", "False), \"ClusteringKeyColumns\": ([ClusteringKeyColumn], False), \"DefaultTimeToLive\": (integer, False), \"EncryptionSpecification\": (EncryptionSpecification, False), \"KeyspaceName\": (str, True),", "rights reserved. # # See LICENSE file for full license. # # ***", "= { \"Column\": (Column, True), \"OrderBy\": (validate_clusteringkeycolumn_orderby, False), } class EncryptionSpecification(AWSProperty): \"\"\" `EncryptionSpecification" ]
async def battle_attack(x, y, u, e, call):
    """
    Resolve one hit from the user against the enemy.

    :param x: user's roll
    :param y: enemy's roll (x == y means the enemy dodged)
    :param u: attacker with a ``damage`` attribute
    :param e: defender; ``health`` and ``defence`` are mutated in place
    :param call: callback query used to show the dodge alert
    :return: tuple of the enemy's (health, defence) after the hit
    """
    if x == y:
        # Matching rolls: the enemy dodged, nothing changes.
        await call.answer("❗ Противник увернулся от удара", show_alert=True)
        return e.health, e.defence

    if e.defence <= 0:
        # No armour left: the full hit lands on health.
        e.health -= u.damage
        return e.health, e.defence

    overflow = u.damage - e.defence
    if overflow > 0:
        # Armour is broken; only the surplus reaches health.
        e.health -= overflow
        e.defence = 0
    else:
        # Armour soaks the whole hit.
        e.defence -= u.damage
    return e.health, e.defence
async def battle_defence(x, y, u, e, call):
    """
    Resolve one hit from the enemy against the user.

    :param x: user's roll
    :param y: enemy's roll (x == y means the user dodged)
    :param u: defender; ``health`` and ``defence`` are mutated in place
    :param e: attacker with a ``damage`` attribute
    :param call: callback query used to show the dodge alert
    :return: tuple of the user's (health, defence) after the hit
    """
    if x == y:
        # Matching rolls: the user dodged, nothing changes.
        await call.answer("❗ Ты увернулся от удара", show_alert=True)
        return u.health, u.defence

    if u.defence <= 0:
        # No armour left: the full hit lands on health.
        u.health -= e.damage
        return u.health, u.defence

    overflow = e.damage - u.defence
    if overflow > 0:
        # Armour is broken; only the surplus reaches health.
        u.health -= overflow
        u.defence = 0
    else:
        # Armour soaks the whole hit.
        u.defence -= e.damage
    return u.health, u.defence
def power(obj, maximal=False):
    """
    Combat power score: hit-point pool times damage.

    :param obj: entity with health/defence/damage attributes
    :param maximal: when exactly True, use max_health/max_defence
                    instead of the current values
    :return: (hp pool + defence pool) * damage
    """
    # NOTE: identity check kept on purpose — only the literal True
    # switches to the maximal stats, mirroring the original contract.
    pool = (obj.max_health + obj.max_defence) if maximal is True \
        else (obj.health + obj.defence)
    return pool * obj.damage
def exam_choose(user):
    """
    Pick the examinator for the user's next rank promotion.

    :param user: user with a ``rank`` attribute ('-' means unranked)
    :return: the first examinator for unranked users; the examinator one
             step above the user's current rank; the string
             'Максимальный ранг!' when the user already holds the last
             rank; or None when the rank matches no examinator.
    """
    from app.models.examinators import exams

    # enumerate() replaces the non-idiomatic range(len(exams)) indexing.
    for i, exam in enumerate(exams):
        # Unranked users always start with the very first examinator.
        # (Kept inside the loop so an empty ladder still yields None.)
        if user.rank == '-':
            return exams[0]
        if exam.rank == user.rank:
            try:
                return exams[i + 1]
            except IndexError:
                # Already at the top of the ladder.
                return 'Максимальный ранг!'
def set_difficulty(m, u):
    """
    Label the fight difficulty by comparing mob power ``m`` with user
    power ``u``.

    :param m: mob's power
    :param u: user's power
    :return: a Russian difficulty label, or None when nothing matches
    """
    # Ordered predicate table: the first true condition wins, exactly
    # mirroring the original if/elif ladder.
    ladder = (
        (lambda: m * 3 <= u, 'Оч. легко'),
        (lambda: m * 2.5 <= u, 'Легко'),
        (lambda: m * 2 < u, 'Нормально'),
        (lambda: m * 1.5 < u, 'Сложно'),
        (lambda: m < u, 'Очень сложно'),
        (lambda: m > u * 3, 'Верная смерть'),
        (lambda: m >= u, 'Невозможно'),
    )
    for matches, label in ladder:
        if matches():
            return label
    return None
def get_xp(lvl):
    """
    Return the total XP required to gain the given level.

    :param lvl: level being gained
    :return: per-level XP quantum int((lvl * 10) ** 1.1), scaled by lvl
    """
    per_level = int((lvl * 10) ** 1.1)
    return per_level * lvl
def item_drop(chance):
    """
    Roll a percentile die to decide whether a mob drops its item.

    :param chance: Mob's chance of drop
    :return: True/False
    """
    roll = random.randint(1, 100)
    return roll <= chance
:param decimals: Specified number of", "if u.defence <= 0: u.health -= e.damage return u.health, u.defence else: if e.damage", "User # :return: User's inventory as list # \"\"\" # inventory = json.loads(u['inventory'])", "* multiplier) / multiplier def enemy_calc(u_attack, u_health, u_defence, lvl): enemy, result = [],", "u: difficulty = 'Оч. легко' elif m * 2.5 <= u: difficulty =", "multiplier def enemy_calc(u_attack, u_health, u_defence, lvl): enemy, result = [], [] if lvl", "exams[0] elif exams[i].rank == user.rank: try: return exams[i + 1] except IndexError: return", "y, u, e, call): if x == y: await call.answer(\"❗ Противник увернулся от", "u.defence else: u.defence -= e.damage return u.health, u.defence def power(obj, maximal=False): if maximal", "try: return exams[i + 1] except IndexError: return 'Максимальный ранг!' def set_difficulty(m, u):", "# :param u: User # :return: User's inventory as list # \"\"\" #", "round_down(random.uniform(0.4, 1.1), 1) else: multiplier = 0.4 print(multiplier) for stat in (u_attack, u_health,", "u): if m * 3 <= u: difficulty = 'Оч. 
легко' elif m", "y: await call.answer(\"❗ Противник увернулся от удара\", show_alert=True) return e.health, e.defence else: if", "u.health -= e.damage return u.health, u.defence else: if e.damage > u.defence: miss_dmg =", "'Верная смерть' elif m >= u: difficulty = 'Невозможно' else: return return difficulty", "=> [3, 2] # :param u: User # :return: User's inventory as list", "difficulty = 'Сложно' elif m < u: difficulty = 'Очень сложно' elif m", "'Нормально' elif m * 1.5 < u: difficulty = 'Сложно' elif m <", "u, e, call): if x == y: await call.answer(\"❗ Ты увернулся от удара\",", "json import math async def battle_attack(x, y, u, e, call): if x ==", "'Очень сложно' elif m > u * 3: difficulty = 'Верная смерть' elif", "miss_dmg e.defence = 0 return e.health, e.defence else: e.defence -= u.damage return e.health,", "# \"\"\" # inventory = json.loads(u['inventory']) if u['inventory'] != '[]' else [] #", "return u.health, u.defence else: if u.defence <= 0: u.health -= e.damage return u.health,", "\"\"\" total_xp = int((lvl * 10) ** 1.1) return total_xp * lvl #", "IndexError: return 'Максимальный ранг!' 
def set_difficulty(m, u): if m * 3 <= u:", "= json.loads(u['inventory']) if u['inventory'] != '[]' else [] # return inventory def item_drop(chance):", "difficulty def get_xp(lvl): \"\"\" Returns total XP according to gain level \"\"\" total_xp", "obj.damage def exam_choose(user): from app.models.examinators import exams for i in range(len(exams)): if user.rank", "total_xp = int((lvl * 10) ** 1.1) return total_xp * lvl # def", "according to gain level \"\"\" total_xp = int((lvl * 10) ** 1.1) return", "call.answer(\"❗ Противник увернулся от удара\", show_alert=True) return e.health, e.defence else: if e.defence <=", "from app.models.examinators import exams for i in range(len(exams)): if user.rank == '-': return", "x == y: await call.answer(\"❗ Противник увернулся от удара\", show_alert=True) return e.health, e.defence", "return return difficulty def get_xp(lvl): \"\"\" Returns total XP according to gain level", "<= u: difficulty = 'Легко' elif m * 2 < u: difficulty =", "u.damage > e.defence: miss_dmg = u.damage - e.defence e.health -= miss_dmg e.defence =", "e.defence else: if e.defence <= 0: e.health -= u.damage return e.health, e.defence else:", "= 10 ** decimals return math.floor(n * multiplier) / multiplier def enemy_calc(u_attack, u_health,", "0) e_power = enemy[0]*(enemy[1]+enemy[2]) formulae = int((e_power/(lvl**1.45))*2) result = [enemy, formulae if formulae", "\"\"\" c = random.randint(1, 100) if c <= chance: return True return False", "def json_inv(u): # \"\"\" # Converts string from database to list # Example:", "= u.damage - e.defence e.health -= miss_dmg e.defence = 0 return e.health, e.defence", "0 return e.health, e.defence else: e.defence -= u.damage return e.health, e.defence async def", "json.loads(u['inventory']) if u['inventory'] != '[]' else [] # return inventory def item_drop(chance): \"\"\"", "e.damage > u.defence: miss_dmg = e.damage - u.defence u.health -= miss_dmg u.defence =", "inventory = json.loads(u['inventory']) if u['inventory'] != 
'[]' else [] # return inventory def", "return e.health, e.defence else: if u.damage > e.defence: miss_dmg = u.damage - e.defence", "= 'Оч. легко' elif m * 2.5 <= u: difficulty = 'Легко' elif", "int((e_power/(lvl**1.45))*2) result = [enemy, formulae if formulae > 1 else 2] return result", "async def battle_defence(x, y, u, e, call): if x == y: await call.answer(\"❗", "удара\", show_alert=True) return u.health, u.defence else: if u.defence <= 0: u.health -= e.damage", "maximal=False): if maximal is True: hp = obj.max_health + obj.max_defence else: hp =", "ранг!' def set_difficulty(m, u): if m * 3 <= u: difficulty = 'Оч.", "True return False def round_down(n, decimals=0): \"\"\" Rounds a number down to a", "return hp * obj.damage def exam_choose(user): from app.models.examinators import exams for i in", "- e.defence e.health -= miss_dmg e.defence = 0 return e.health, e.defence else: e.defence", "2] # :param u: User # :return: User's inventory as list # \"\"\"", "c = random.randint(1, 100) if c <= chance: return True return False def", "for stat in (u_attack, u_health, u_defence): enemy.append(round(stat*multiplier) if stat != 0 else 0)", "a specified number of digits. :param decimals: Specified number of digits :param n:", "# \"\"\" # Converts string from database to list # Example: '[3, 2]'", "u: User # :return: User's inventory as list # \"\"\" # inventory =", "return False def round_down(n, decimals=0): \"\"\" Rounds a number down to a specified", "digits. :param decimals: Specified number of digits :param n: Float \"\"\" multiplier =", "else: u.defence -= e.damage return u.health, u.defence def power(obj, maximal=False): if maximal is", "get_xp(lvl): \"\"\" Returns total XP according to gain level \"\"\" total_xp = int((lvl", "number of digits. 
:param decimals: Specified number of digits :param n: Float \"\"\"", "def battle_defence(x, y, u, e, call): if x == y: await call.answer(\"❗ Ты", "call): if x == y: await call.answer(\"❗ Ты увернулся от удара\", show_alert=True) return", "multiplier = 0.4 print(multiplier) for stat in (u_attack, u_health, u_defence): enemy.append(round(stat*multiplier) if stat", "[] if lvl != 1: multiplier = round_down(random.uniform(0.4, 1.1), 1) else: multiplier =", "# Example: '[3, 2]' => [3, 2] # :param u: User # :return:", "\"\"\" # inventory = json.loads(u['inventory']) if u['inventory'] != '[]' else [] # return", "int((lvl * 10) ** 1.1) return total_xp * lvl # def json_inv(u): #", "увернулся от удара\", show_alert=True) return e.health, e.defence else: if e.defence <= 0: e.health", "a number down to a specified number of digits. :param decimals: Specified number", "> u.defence: miss_dmg = e.damage - u.defence u.health -= miss_dmg u.defence = 0", "chance of drop :return: True/False \"\"\" c = random.randint(1, 100) if c <=", "[], [] if lvl != 1: multiplier = round_down(random.uniform(0.4, 1.1), 1) else: multiplier", "-= miss_dmg e.defence = 0 return e.health, e.defence else: e.defence -= u.damage return", "difficulty = 'Верная смерть' elif m >= u: difficulty = 'Невозможно' else: return", "* 3: difficulty = 'Верная смерть' elif m >= u: difficulty = 'Невозможно'", "удара\", show_alert=True) return e.health, e.defence else: if e.defence <= 0: e.health -= u.damage", "[3, 2] # :param u: User # :return: User's inventory as list #", "- u.defence u.health -= miss_dmg u.defence = 0 return u.health, u.defence else: u.defence", "difficulty = 'Оч. 
легко' elif m * 2.5 <= u: difficulty = 'Легко'", "difficulty = 'Нормально' elif m * 1.5 < u: difficulty = 'Сложно' elif", "** 1.1) return total_xp * lvl # def json_inv(u): # \"\"\" # Converts", "* 2.5 <= u: difficulty = 'Легко' elif m * 2 < u:", "\"\"\" # Converts string from database to list # Example: '[3, 2]' =>", "< u: difficulty = 'Нормально' elif m * 1.5 < u: difficulty =", "1.5 < u: difficulty = 'Сложно' elif m < u: difficulty = 'Очень", ":return: User's inventory as list # \"\"\" # inventory = json.loads(u['inventory']) if u['inventory']", "if u['inventory'] != '[]' else [] # return inventory def item_drop(chance): \"\"\" :param", "False def round_down(n, decimals=0): \"\"\" Rounds a number down to a specified number", "i in range(len(exams)): if user.rank == '-': return exams[0] elif exams[i].rank == user.rank:", "as list # \"\"\" # inventory = json.loads(u['inventory']) if u['inventory'] != '[]' else", "\"\"\" Rounds a number down to a specified number of digits. :param decimals:", "> u * 3: difficulty = 'Верная смерть' elif m >= u: difficulty", "= 'Легко' elif m * 2 < u: difficulty = 'Нормально' elif m", ":param decimals: Specified number of digits :param n: Float \"\"\" multiplier = 10", "async def battle_attack(x, y, u, e, call): if x == y: await call.answer(\"❗", "m < u: difficulty = 'Очень сложно' elif m > u * 3:", "import math async def battle_attack(x, y, u, e, call): if x == y:", "else: if u.damage > e.defence: miss_dmg = u.damage - e.defence e.health -= miss_dmg", "def round_down(n, decimals=0): \"\"\" Rounds a number down to a specified number of", "round_down(n, decimals=0): \"\"\" Rounds a number down to a specified number of digits.", "= obj.max_health + obj.max_defence else: hp = obj.health + obj.defence return hp *", "else: hp = obj.health + obj.defence return hp * obj.damage def exam_choose(user): from", "= enemy[0]*(enemy[1]+enemy[2]) formulae = int((e_power/(lvl**1.45))*2) result = [enemy, formulae if formulae > 1", "'Максимальный ранг!' 
def set_difficulty(m, u): if m * 3 <= u: difficulty =", "if e.defence <= 0: e.health -= u.damage return e.health, e.defence else: if u.damage", "decimals return math.floor(n * multiplier) / multiplier def enemy_calc(u_attack, u_health, u_defence, lvl): enemy,", "> e.defence: miss_dmg = u.damage - e.defence e.health -= miss_dmg e.defence = 0", "m > u * 3: difficulty = 'Верная смерть' elif m >= u:", ":return: True/False \"\"\" c = random.randint(1, 100) if c <= chance: return True", "= 0 return u.health, u.defence else: u.defence -= e.damage return u.health, u.defence def", "<= 0: u.health -= e.damage return u.health, u.defence else: if e.damage > u.defence:", "string from database to list # Example: '[3, 2]' => [3, 2] #", "return u.health, u.defence def power(obj, maximal=False): if maximal is True: hp = obj.max_health", "= 'Верная смерть' elif m >= u: difficulty = 'Невозможно' else: return return", "u: difficulty = 'Нормально' elif m * 1.5 < u: difficulty = 'Сложно'", "* 3 <= u: difficulty = 'Оч. 
легко' elif m * 2.5 <=", "u_health, u_defence, lvl): enemy, result = [], [] if lvl != 1: multiplier", "e.damage return u.health, u.defence else: if e.damage > u.defence: miss_dmg = e.damage -", "hp = obj.health + obj.defence return hp * obj.damage def exam_choose(user): from app.models.examinators", "True: hp = obj.max_health + obj.max_defence else: hp = obj.health + obj.defence return", "3: difficulty = 'Верная смерть' elif m >= u: difficulty = 'Невозможно' else:", "call.answer(\"❗ Ты увернулся от удара\", show_alert=True) return u.health, u.defence else: if u.defence <=", "if maximal is True: hp = obj.max_health + obj.max_defence else: hp = obj.health", "difficulty = 'Невозможно' else: return return difficulty def get_xp(lvl): \"\"\" Returns total XP", "1) else: multiplier = 0.4 print(multiplier) for stat in (u_attack, u_health, u_defence): enemy.append(round(stat*multiplier)", "if u.damage > e.defence: miss_dmg = u.damage - e.defence e.health -= miss_dmg e.defence", "легко' elif m * 2.5 <= u: difficulty = 'Легко' elif m *", "user.rank: try: return exams[i + 1] except IndexError: return 'Максимальный ранг!' 
def set_difficulty(m,", "'[]' else [] # return inventory def item_drop(chance): \"\"\" :param chance: Mob's chance", "e.health -= u.damage return e.health, e.defence else: if u.damage > e.defence: miss_dmg =", "obj.max_defence else: hp = obj.health + obj.defence return hp * obj.damage def exam_choose(user):", "Ты увернулся от удара\", show_alert=True) return u.health, u.defence else: if u.defence <= 0:", "u.health -= miss_dmg u.defence = 0 return u.health, u.defence else: u.defence -= e.damage", "list # Example: '[3, 2]' => [3, 2] # :param u: User #", "return difficulty def get_xp(lvl): \"\"\" Returns total XP according to gain level \"\"\"", "total_xp * lvl # def json_inv(u): # \"\"\" # Converts string from database", "to gain level \"\"\" total_xp = int((lvl * 10) ** 1.1) return total_xp", "of drop :return: True/False \"\"\" c = random.randint(1, 100) if c <= chance:", "e.health, e.defence else: if e.defence <= 0: e.health -= u.damage return e.health, e.defence", "def battle_attack(x, y, u, e, call): if x == y: await call.answer(\"❗ Противник", "lvl): enemy, result = [], [] if lvl != 1: multiplier = round_down(random.uniform(0.4,", "0 return u.health, u.defence else: u.defence -= e.damage return u.health, u.defence def power(obj,", "XP according to gain level \"\"\" total_xp = int((lvl * 10) ** 1.1)", "== y: await call.answer(\"❗ Ты увернулся от удара\", show_alert=True) return u.health, u.defence else:", "0: u.health -= e.damage return u.health, u.defence else: if e.damage > u.defence: miss_dmg", "'Невозможно' else: return return difficulty def get_xp(lvl): \"\"\" Returns total XP according to", "u.defence u.health -= miss_dmg u.defence = 0 return u.health, u.defence else: u.defence -=", "lvl # def json_inv(u): # \"\"\" # Converts string from database to list", "power(obj, maximal=False): if maximal is True: hp = obj.max_health + obj.max_defence else: hp", "formulae = int((e_power/(lvl**1.45))*2) result = [enemy, formulae if formulae > 1 else 2]", "if lvl != 1: 
multiplier = round_down(random.uniform(0.4, 1.1), 1) else: multiplier = 0.4", "else: multiplier = 0.4 print(multiplier) for stat in (u_attack, u_health, u_defence): enemy.append(round(stat*multiplier) if", "u: difficulty = 'Легко' elif m * 2 < u: difficulty = 'Нормально'", "e.damage return u.health, u.defence def power(obj, maximal=False): if maximal is True: hp =", "== y: await call.answer(\"❗ Противник увернулся от удара\", show_alert=True) return e.health, e.defence else:", "if x == y: await call.answer(\"❗ Противник увернулся от удара\", show_alert=True) return e.health,", "random import json import math async def battle_attack(x, y, u, e, call): if", "chance: Mob's chance of drop :return: True/False \"\"\" c = random.randint(1, 100) if", "else: if u.defence <= 0: u.health -= e.damage return u.health, u.defence else: if", "obj.max_health + obj.max_defence else: hp = obj.health + obj.defence return hp * obj.damage", "enemy_calc(u_attack, u_health, u_defence, lvl): enemy, result = [], [] if lvl != 1:", "\"\"\" Returns total XP according to gain level \"\"\" total_xp = int((lvl *", "** decimals return math.floor(n * multiplier) / multiplier def enemy_calc(u_attack, u_health, u_defence, lvl):", "stat in (u_attack, u_health, u_defence): enemy.append(round(stat*multiplier) if stat != 0 else 0) e_power", "return total_xp * lvl # def json_inv(u): # \"\"\" # Converts string from", "сложно' elif m > u * 3: difficulty = 'Верная смерть' elif m", "== user.rank: try: return exams[i + 1] except IndexError: return 'Максимальный ранг!' 
def", ">= u: difficulty = 'Невозможно' else: return return difficulty def get_xp(lvl): \"\"\" Returns", "= 'Очень сложно' elif m > u * 3: difficulty = 'Верная смерть'", "u.health, u.defence else: if u.defence <= 0: u.health -= e.damage return u.health, u.defence", "2.5 <= u: difficulty = 'Легко' elif m * 2 < u: difficulty", "u.damage return e.health, e.defence else: if u.damage > e.defence: miss_dmg = u.damage -", "1.1) return total_xp * lvl # def json_inv(u): # \"\"\" # Converts string", "return exams[i + 1] except IndexError: return 'Максимальный ранг!' def set_difficulty(m, u): if", "u.health, u.defence def power(obj, maximal=False): if maximal is True: hp = obj.max_health +", "<= 0: e.health -= u.damage return e.health, e.defence else: if u.damage > e.defence:", "'-': return exams[0] elif exams[i].rank == user.rank: try: return exams[i + 1] except", "json_inv(u): # \"\"\" # Converts string from database to list # Example: '[3,", "[] # return inventory def item_drop(chance): \"\"\" :param chance: Mob's chance of drop", "* obj.damage def exam_choose(user): from app.models.examinators import exams for i in range(len(exams)): if", "lvl != 1: multiplier = round_down(random.uniform(0.4, 1.1), 1) else: multiplier = 0.4 print(multiplier)", "+ obj.defence return hp * obj.damage def exam_choose(user): from app.models.examinators import exams for", "miss_dmg = u.damage - e.defence e.health -= miss_dmg e.defence = 0 return e.health,", "u_defence, lvl): enemy, result = [], [] if lvl != 1: multiplier =", "-= e.damage return u.health, u.defence def power(obj, maximal=False): if maximal is True: hp", "!= 1: multiplier = round_down(random.uniform(0.4, 1.1), 1) else: multiplier = 0.4 print(multiplier) for", "number of digits :param n: Float \"\"\" multiplier = 10 ** decimals return", "* 2 < u: difficulty = 'Нормально' elif m * 1.5 < u:", "= obj.health + obj.defence return hp * obj.damage def exam_choose(user): from app.models.examinators import", "level \"\"\" total_xp = 
int((lvl * 10) ** 1.1) return total_xp * lvl", "if e.damage > u.defence: miss_dmg = e.damage - u.defence u.health -= miss_dmg u.defence", "elif exams[i].rank == user.rank: try: return exams[i + 1] except IndexError: return 'Максимальный", "def get_xp(lvl): \"\"\" Returns total XP according to gain level \"\"\" total_xp =", "e_power = enemy[0]*(enemy[1]+enemy[2]) formulae = int((e_power/(lvl**1.45))*2) result = [enemy, formulae if formulae >", "return 'Максимальный ранг!' def set_difficulty(m, u): if m * 3 <= u: difficulty", "u.health, u.defence else: u.defence -= e.damage return u.health, u.defence def power(obj, maximal=False): if", "m * 2 < u: difficulty = 'Нормально' elif m * 1.5 <" ]
[ "def __init__(self, x, y, background): self.__x = x self.__y = y #self.__image =", "self.__y - self.__label.content_height y1bottom = self.__y return (x1right>x>x1left) and (y1bottom>y>y1top) def on_hover(self, x,", "pyglet.sprite.Sprite(self.__image, x, y - self.__image.height) self.__components = [] self.__border = Border(x -10, y", "on_click(self, x, y): for component in self.__components: component.on_click(x,y) def on_release(self, x, y): for", "Button from .border import Border class Menu: def __init__(self, x, y, background): self.__x", "+ 40) else: x = self.__x + 10 y = self.__y - 10", "0: x = self.__components[-1].x() y = self.__components[-1].y() - (self.__components[-1].height() + 40) else: x", "if component.width() + 10 + 20 > self.components_width: self.components_width = component.width() + 10", "+ 20 > self.components_width: self.components_width = component.width() + 10 + 20 self.__border.render(self.components_width, self.component_height_sum)", "= self.__components[-1].y() - (self.__components[-1].content_height + 10) #else: # x = self.__x + 10", "10 # y = self.__y - 10 #button = Button(text, x, y) #self.__components.append(button)", "text, background): if len(self.__components) > 0: x = self.__components[-1].x() y = self.__components[-1].y() -", "def add_button(self, text, background): if len(self.__components) > 0: x = self.__components[-1].x() y =", "= self.__x + self.__label.content_width y1top = self.__y - self.__label.content_height y1bottom = self.__y return", "component in self.__components: component.render() self.component_height_sum += component.height() + 40 self.components_width = 0 for", "self.__x x1right = self.__x + self.__label.content_width y1top = self.__y - self.__label.content_height y1bottom =", "#if len(self.__components) > 0: # x = self.__component[-1].x() + 10 # y =", "x, y) #self.__components.append(button) def add_button(self, text, background): if len(self.__components) > 0: x =", "= self.__components[-1].x() y = 
self.__components[-1].y() - (self.__components[-1].height() + 40) else: x = self.__x", "> self.components_width: self.components_width = component.width() + 10 + 20 self.__border.render(self.components_width, self.component_height_sum) #def add_button(self,", "20 self.__border.render(self.components_width, self.component_height_sum) #def add_button(self, text): #if len(self.__components) > 0: # x =", "= [] self.__border = Border(x -10, y + 5, background) def render(self): #self.__sprite.draw()", "x, y, background) self.__components.append(button) def withinBoundry(self, x, y): x1left = self.__x x1right =", "self.__y = y #self.__image = pyglet.image.load(background) #self.__sprite = pyglet.sprite.Sprite(self.__image, x, y - self.__image.height)", "x1right = self.__x + self.__label.content_width y1top = self.__y - self.__label.content_height y1bottom = self.__y", "component in self.__components: component.on_hover(x,y) def on_click(self, x, y): for component in self.__components: component.on_click(x,y)", "= Border(x -10, y + 5, background) def render(self): #self.__sprite.draw() self.component_height_sum = 0", "= Button(text, x, y, background) self.__components.append(button) def withinBoundry(self, x, y): x1left = self.__x", "class Menu: def __init__(self, x, y, background): self.__x = x self.__y = y", "y): x1left = self.__x x1right = self.__x + self.__label.content_width y1top = self.__y -", "component in self.__components: if component.width() + 10 + 20 > self.components_width: self.components_width =", "y - self.__image.height) self.__components = [] self.__border = Border(x -10, y + 5,", "y #self.__image = pyglet.image.load(background) #self.__sprite = pyglet.sprite.Sprite(self.__image, x, y - self.__image.height) self.__components =", "background): self.__x = x self.__y = y #self.__image = pyglet.image.load(background) #self.__sprite = pyglet.sprite.Sprite(self.__image,", "= x self.__y = y #self.__image = pyglet.image.load(background) #self.__sprite = 
pyglet.sprite.Sprite(self.__image, x, y", "+ 10 # y = self.__components[-1].y() - (self.__components[-1].content_height + 10) #else: # x", "40 self.components_width = 0 for component in self.__components: if component.width() + 10 +", "self.__components: if component.width() + 10 + 20 > self.components_width: self.components_width = component.width() +", "y1bottom = self.__y return (x1right>x>x1left) and (y1bottom>y>y1top) def on_hover(self, x, y): for component", "x, y, background): self.__x = x self.__y = y #self.__image = pyglet.image.load(background) #self.__sprite", "- 10 #button = Button(text, x, y) #self.__components.append(button) def add_button(self, text, background): if", "#self.__sprite = pyglet.sprite.Sprite(self.__image, x, y - self.__image.height) self.__components = [] self.__border = Border(x", "y) #self.__components.append(button) def add_button(self, text, background): if len(self.__components) > 0: x = self.__components[-1].x()", "> 0: x = self.__components[-1].x() y = self.__components[-1].y() - (self.__components[-1].height() + 40) else:", "= self.__x + 10 y = self.__y - 10 button = Button(text, x,", "y, background): self.__x = x self.__y = y #self.__image = pyglet.image.load(background) #self.__sprite =", "self.__components.append(button) def withinBoundry(self, x, y): x1left = self.__x x1right = self.__x + self.__label.content_width", "40) else: x = self.__x + 10 y = self.__y - 10 button", "- self.__label.content_height y1bottom = self.__y return (x1right>x>x1left) and (y1bottom>y>y1top) def on_hover(self, x, y):", "= self.__y - self.__label.content_height y1bottom = self.__y return (x1right>x>x1left) and (y1bottom>y>y1top) def on_hover(self,", "#self.__components.append(button) def add_button(self, text, background): if len(self.__components) > 0: x = self.__components[-1].x() y", "from .button import Button from .border import Border class Menu: def __init__(self, x,", "component.width() + 10 + 20 > self.components_width: 
self.components_width = component.width() + 10 +", "+ 10 + 20 self.__border.render(self.components_width, self.component_height_sum) #def add_button(self, text): #if len(self.__components) > 0:", "on_hover(self, x, y): for component in self.__components: component.on_hover(x,y) def on_click(self, x, y): for", "#def add_button(self, text): #if len(self.__components) > 0: # x = self.__component[-1].x() + 10", "(x1right>x>x1left) and (y1bottom>y>y1top) def on_hover(self, x, y): for component in self.__components: component.on_hover(x,y) def", "background) def render(self): #self.__sprite.draw() self.component_height_sum = 0 for component in self.__components: component.render() self.component_height_sum", "10 # y = self.__components[-1].y() - (self.__components[-1].content_height + 10) #else: # x =", "x, y): for component in self.__components: component.on_hover(x,y) def on_click(self, x, y): for component", "Button(text, x, y) #self.__components.append(button) def add_button(self, text, background): if len(self.__components) > 0: x", "y + 5, background) def render(self): #self.__sprite.draw() self.component_height_sum = 0 for component in", "Menu: def __init__(self, x, y, background): self.__x = x self.__y = y #self.__image", "background) self.__components.append(button) def withinBoundry(self, x, y): x1left = self.__x x1right = self.__x +", "in self.__components: component.render() self.component_height_sum += component.height() + 40 self.components_width = 0 for component", "#self.__image = pyglet.image.load(background) #self.__sprite = pyglet.sprite.Sprite(self.__image, x, y - self.__image.height) self.__components = []", "x1left = self.__x x1right = self.__x + self.__label.content_width y1top = self.__y - self.__label.content_height", "10) #else: # x = self.__x + 10 # y = self.__y -", "= y #self.__image = pyglet.image.load(background) #self.__sprite = pyglet.sprite.Sprite(self.__image, x, y - self.__image.height) self.__components", "+ 40 self.components_width = 
0 for component in self.__components: if component.width() + 10", "x = self.__x + 10 # y = self.__y - 10 #button =", "= 0 for component in self.__components: component.render() self.component_height_sum += component.height() + 40 self.components_width", "+ 10 + 20 > self.components_width: self.components_width = component.width() + 10 + 20", "= self.__component[-1].x() + 10 # y = self.__components[-1].y() - (self.__components[-1].content_height + 10) #else:", "else: x = self.__x + 10 y = self.__y - 10 button =", "button = Button(text, x, y, background) self.__components.append(button) def withinBoundry(self, x, y): x1left =", "# y = self.__y - 10 #button = Button(text, x, y) #self.__components.append(button) def", "+ 10 y = self.__y - 10 button = Button(text, x, y, background)", "component.on_hover(x,y) def on_click(self, x, y): for component in self.__components: component.on_click(x,y) def on_release(self, x,", "def on_click(self, x, y): for component in self.__components: component.on_click(x,y) def on_release(self, x, y):", "Border class Menu: def __init__(self, x, y, background): self.__x = x self.__y =", "20 > self.components_width: self.components_width = component.width() + 10 + 20 self.__border.render(self.components_width, self.component_height_sum) #def", "<gh_stars>0 import pyglet from .button import Button from .border import Border class Menu:", "0 for component in self.__components: if component.width() + 10 + 20 > self.components_width:", "and (y1bottom>y>y1top) def on_hover(self, x, y): for component in self.__components: component.on_hover(x,y) def on_click(self,", "= self.__y return (x1right>x>x1left) and (y1bottom>y>y1top) def on_hover(self, x, y): for component in", "> 0: # x = self.__component[-1].x() + 10 # y = self.__components[-1].y() -", "self.__label.content_height y1bottom = self.__y return (x1right>x>x1left) and (y1bottom>y>y1top) def on_hover(self, x, y): for", "def render(self): #self.__sprite.draw() self.component_height_sum = 0 for 
component in self.__components: component.render() self.component_height_sum +=", "x = self.__components[-1].x() y = self.__components[-1].y() - (self.__components[-1].height() + 40) else: x =", "self.__x + 10 # y = self.__y - 10 #button = Button(text, x,", "- self.__image.height) self.__components = [] self.__border = Border(x -10, y + 5, background)", "y): for component in self.__components: component.on_hover(x,y) def on_click(self, x, y): for component in", "# y = self.__components[-1].y() - (self.__components[-1].content_height + 10) #else: # x = self.__x", "self.component_height_sum) #def add_button(self, text): #if len(self.__components) > 0: # x = self.__component[-1].x() +", "add_button(self, text): #if len(self.__components) > 0: # x = self.__component[-1].x() + 10 #", "in self.__components: component.on_hover(x,y) def on_click(self, x, y): for component in self.__components: component.on_click(x,y) def", "pyglet.image.load(background) #self.__sprite = pyglet.sprite.Sprite(self.__image, x, y - self.__image.height) self.__components = [] self.__border =", "y = self.__components[-1].y() - (self.__components[-1].height() + 40) else: x = self.__x + 10", "0 for component in self.__components: component.render() self.component_height_sum += component.height() + 40 self.components_width =", "= self.__x x1right = self.__x + self.__label.content_width y1top = self.__y - self.__label.content_height y1bottom", "self.__border = Border(x -10, y + 5, background) def render(self): #self.__sprite.draw() self.component_height_sum =", "(y1bottom>y>y1top) def on_hover(self, x, y): for component in self.__components: component.on_hover(x,y) def on_click(self, x,", "y = self.__components[-1].y() - (self.__components[-1].content_height + 10) #else: # x = self.__x +", "for component in self.__components: component.on_hover(x,y) def on_click(self, x, y): for component in self.__components:", "10 button = Button(text, x, y, background) self.__components.append(button) def 
withinBoundry(self, x, y): x1left", "text): #if len(self.__components) > 0: # x = self.__component[-1].x() + 10 # y", "self.__components[-1].x() y = self.__components[-1].y() - (self.__components[-1].height() + 40) else: x = self.__x +", "return (x1right>x>x1left) and (y1bottom>y>y1top) def on_hover(self, x, y): for component in self.__components: component.on_hover(x,y)", "render(self): #self.__sprite.draw() self.component_height_sum = 0 for component in self.__components: component.render() self.component_height_sum += component.height()", "component.width() + 10 + 20 self.__border.render(self.components_width, self.component_height_sum) #def add_button(self, text): #if len(self.__components) >", "+ self.__label.content_width y1top = self.__y - self.__label.content_height y1bottom = self.__y return (x1right>x>x1left) and", "self.__component[-1].x() + 10 # y = self.__components[-1].y() - (self.__components[-1].content_height + 10) #else: #", "y): for component in self.__components: component.on_click(x,y) def on_release(self, x, y): for component in", "self.__label.content_width y1top = self.__y - self.__label.content_height y1bottom = self.__y return (x1right>x>x1left) and (y1bottom>y>y1top)", "self.components_width: self.components_width = component.width() + 10 + 20 self.__border.render(self.components_width, self.component_height_sum) #def add_button(self, text):", "add_button(self, text, background): if len(self.__components) > 0: x = self.__components[-1].x() y = self.__components[-1].y()", "component.render() self.component_height_sum += component.height() + 40 self.components_width = 0 for component in self.__components:", "len(self.__components) > 0: # x = self.__component[-1].x() + 10 # y = self.__components[-1].y()", "import Button from .border import Border class Menu: def __init__(self, x, y, background):", "import pyglet from .button import Button from .border import Border class Menu: def", "for component in self.__components: component.render() 
self.component_height_sum += component.height() + 40 self.components_width = 0", "self.components_width = 0 for component in self.__components: if component.width() + 10 + 20", "+ 10) #else: # x = self.__x + 10 # y = self.__y", "if len(self.__components) > 0: x = self.__components[-1].x() y = self.__components[-1].y() - (self.__components[-1].height() +", "component in self.__components: component.on_click(x,y) def on_release(self, x, y): for component in self.__components: component.on_release(x,y)", "self.component_height_sum += component.height() + 40 self.components_width = 0 for component in self.__components: if", "10 #button = Button(text, x, y) #self.__components.append(button) def add_button(self, text, background): if len(self.__components)", "from .border import Border class Menu: def __init__(self, x, y, background): self.__x =", "self.__x = x self.__y = y #self.__image = pyglet.image.load(background) #self.__sprite = pyglet.sprite.Sprite(self.__image, x,", "x self.__y = y #self.__image = pyglet.image.load(background) #self.__sprite = pyglet.sprite.Sprite(self.__image, x, y -", "+ 10 # y = self.__y - 10 #button = Button(text, x, y)", "= component.width() + 10 + 20 self.__border.render(self.components_width, self.component_height_sum) #def add_button(self, text): #if len(self.__components)", "self.__components: component.on_hover(x,y) def on_click(self, x, y): for component in self.__components: component.on_click(x,y) def on_release(self,", "pyglet from .button import Button from .border import Border class Menu: def __init__(self,", "# x = self.__component[-1].x() + 10 # y = self.__components[-1].y() - (self.__components[-1].content_height +", "self.__y - 10 #button = Button(text, x, y) #self.__components.append(button) def add_button(self, text, background):", "withinBoundry(self, x, y): x1left = self.__x x1right = self.__x + self.__label.content_width y1top =", "- (self.__components[-1].content_height + 10) #else: # x = self.__x + 10 # y", "def 
withinBoundry(self, x, y): x1left = self.__x x1right = self.__x + self.__label.content_width y1top", "len(self.__components) > 0: x = self.__components[-1].x() y = self.__components[-1].y() - (self.__components[-1].height() + 40)", "self.__y - 10 button = Button(text, x, y, background) self.__components.append(button) def withinBoundry(self, x,", "- (self.__components[-1].height() + 40) else: x = self.__x + 10 y = self.__y", "self.__components[-1].y() - (self.__components[-1].content_height + 10) #else: # x = self.__x + 10 #", "y = self.__y - 10 button = Button(text, x, y, background) self.__components.append(button) def", "y = self.__y - 10 #button = Button(text, x, y) #self.__components.append(button) def add_button(self,", "x = self.__component[-1].x() + 10 # y = self.__components[-1].y() - (self.__components[-1].content_height + 10)", "self.__x + self.__label.content_width y1top = self.__y - self.__label.content_height y1bottom = self.__y return (x1right>x>x1left)", "(self.__components[-1].height() + 40) else: x = self.__x + 10 y = self.__y -", "# x = self.__x + 10 # y = self.__y - 10 #button", "y, background) self.__components.append(button) def withinBoundry(self, x, y): x1left = self.__x x1right = self.__x", "#self.__sprite.draw() self.component_height_sum = 0 for component in self.__components: component.render() self.component_height_sum += component.height() +", "self.component_height_sum = 0 for component in self.__components: component.render() self.component_height_sum += component.height() + 40", "0: # x = self.__component[-1].x() + 10 # y = self.__components[-1].y() - (self.__components[-1].content_height", "self.__y return (x1right>x>x1left) and (y1bottom>y>y1top) def on_hover(self, x, y): for component in self.__components:", "[] self.__border = Border(x -10, y + 5, background) def render(self): #self.__sprite.draw() self.component_height_sum", "= 0 for component in self.__components: if component.width() + 10 + 20 >", "__init__(self, x, y, 
background): self.__x = x self.__y = y #self.__image = pyglet.image.load(background)", "10 + 20 self.__border.render(self.components_width, self.component_height_sum) #def add_button(self, text): #if len(self.__components) > 0: #", "import Border class Menu: def __init__(self, x, y, background): self.__x = x self.__y", ".border import Border class Menu: def __init__(self, x, y, background): self.__x = x", "self.__components: component.render() self.component_height_sum += component.height() + 40 self.components_width = 0 for component in", "for component in self.__components: if component.width() + 10 + 20 > self.components_width: self.components_width", "component.height() + 40 self.components_width = 0 for component in self.__components: if component.width() +", "x = self.__x + 10 y = self.__y - 10 button = Button(text,", "self.__border.render(self.components_width, self.component_height_sum) #def add_button(self, text): #if len(self.__components) > 0: # x = self.__component[-1].x()", "= Button(text, x, y) #self.__components.append(button) def add_button(self, text, background): if len(self.__components) > 0:", "for component in self.__components: component.on_click(x,y) def on_release(self, x, y): for component in self.__components:", "+ 20 self.__border.render(self.components_width, self.component_height_sum) #def add_button(self, text): #if len(self.__components) > 0: # x", "#else: # x = self.__x + 10 # y = self.__y - 10", "Button(text, x, y, background) self.__components.append(button) def withinBoundry(self, x, y): x1left = self.__x x1right", "y1top = self.__y - self.__label.content_height y1bottom = self.__y return (x1right>x>x1left) and (y1bottom>y>y1top) def", "self.__image.height) self.__components = [] self.__border = Border(x -10, y + 5, background) def", "10 + 20 > self.components_width: self.components_width = component.width() + 10 + 20 self.__border.render(self.components_width,", "= pyglet.image.load(background) #self.__sprite = 
pyglet.sprite.Sprite(self.__image, x, y - self.__image.height) self.__components = [] self.__border", "#button = Button(text, x, y) #self.__components.append(button) def add_button(self, text, background): if len(self.__components) >", "(self.__components[-1].content_height + 10) #else: # x = self.__x + 10 # y =", "= self.__x + 10 # y = self.__y - 10 #button = Button(text,", "- 10 button = Button(text, x, y, background) self.__components.append(button) def withinBoundry(self, x, y):", "10 y = self.__y - 10 button = Button(text, x, y, background) self.__components.append(button)", ".button import Button from .border import Border class Menu: def __init__(self, x, y,", "self.__components = [] self.__border = Border(x -10, y + 5, background) def render(self):", "background): if len(self.__components) > 0: x = self.__components[-1].x() y = self.__components[-1].y() - (self.__components[-1].height()", "self.components_width = component.width() + 10 + 20 self.__border.render(self.components_width, self.component_height_sum) #def add_button(self, text): #if", "x, y): x1left = self.__x x1right = self.__x + self.__label.content_width y1top = self.__y", "= self.__y - 10 button = Button(text, x, y, background) self.__components.append(button) def withinBoundry(self,", "x, y - self.__image.height) self.__components = [] self.__border = Border(x -10, y +", "= self.__components[-1].y() - (self.__components[-1].height() + 40) else: x = self.__x + 10 y", "self.__components[-1].y() - (self.__components[-1].height() + 40) else: x = self.__x + 10 y =", "Border(x -10, y + 5, background) def render(self): #self.__sprite.draw() self.component_height_sum = 0 for", "-10, y + 5, background) def render(self): #self.__sprite.draw() self.component_height_sum = 0 for component", "def on_hover(self, x, y): for component in self.__components: component.on_hover(x,y) def on_click(self, x, y):", "= self.__y - 10 #button = Button(text, x, y) #self.__components.append(button) def add_button(self, 
text,", "5, background) def render(self): #self.__sprite.draw() self.component_height_sum = 0 for component in self.__components: component.render()", "self.__x + 10 y = self.__y - 10 button = Button(text, x, y,", "= pyglet.sprite.Sprite(self.__image, x, y - self.__image.height) self.__components = [] self.__border = Border(x -10,", "+ 5, background) def render(self): #self.__sprite.draw() self.component_height_sum = 0 for component in self.__components:", "x, y): for component in self.__components: component.on_click(x,y) def on_release(self, x, y): for component", "in self.__components: if component.width() + 10 + 20 > self.components_width: self.components_width = component.width()", "+= component.height() + 40 self.components_width = 0 for component in self.__components: if component.width()" ]
[ "from the Mongo collection and transform into a pandas dataframe :projection: A dictionary", "pd.DataFrame: \"\"\" Load the data from the Mongo collection and transform into a", "data from the Mongo collection and transform into a pandas dataframe :projection: A", "db def load_data(projection: dict) -> pd.DataFrame: \"\"\" Load the data from the Mongo", "dataframe :projection: A dictionary with the fields to load from database :return: A", "collection and transform into a pandas dataframe :projection: A dictionary with the fields", "with the fields to load from database :return: A pandas dataframe with the", "and transform into a pandas dataframe :projection: A dictionary with the fields to", ":return: A pandas dataframe with the data \"\"\" articles = db.read_articles( projection=projection )", "the data from the Mongo collection and transform into a pandas dataframe :projection:", "load from database :return: A pandas dataframe with the data \"\"\" articles =", "from database :return: A pandas dataframe with the data \"\"\" articles = db.read_articles(", "-> pd.DataFrame: \"\"\" Load the data from the Mongo collection and transform into", "news_classifier.database import db def load_data(projection: dict) -> pd.DataFrame: \"\"\" Load the data from", "A pandas dataframe with the data \"\"\" articles = db.read_articles( projection=projection ) return", "into a pandas dataframe :projection: A dictionary with the fields to load from", "pandas dataframe with the data \"\"\" articles = db.read_articles( projection=projection ) return pd.DataFrame(articles)", "Load the data from the Mongo collection and transform into a pandas dataframe", "as pd from news_classifier.database import db def load_data(projection: dict) -> pd.DataFrame: \"\"\" Load", "import db def load_data(projection: dict) -> pd.DataFrame: \"\"\" Load the data from the", ":projection: A dictionary with the fields to load from database :return: A pandas", "load_data(projection: dict) -> pd.DataFrame: 
\"\"\" Load the data from the Mongo collection and", "database :return: A pandas dataframe with the data \"\"\" articles = db.read_articles( projection=projection", "to load from database :return: A pandas dataframe with the data \"\"\" articles", "pandas dataframe :projection: A dictionary with the fields to load from database :return:", "transform into a pandas dataframe :projection: A dictionary with the fields to load", "the fields to load from database :return: A pandas dataframe with the data", "Mongo collection and transform into a pandas dataframe :projection: A dictionary with the", "A dictionary with the fields to load from database :return: A pandas dataframe", "from news_classifier.database import db def load_data(projection: dict) -> pd.DataFrame: \"\"\" Load the data", "a pandas dataframe :projection: A dictionary with the fields to load from database", "pd from news_classifier.database import db def load_data(projection: dict) -> pd.DataFrame: \"\"\" Load the", "dictionary with the fields to load from database :return: A pandas dataframe with", "def load_data(projection: dict) -> pd.DataFrame: \"\"\" Load the data from the Mongo collection", "pandas as pd from news_classifier.database import db def load_data(projection: dict) -> pd.DataFrame: \"\"\"", "the Mongo collection and transform into a pandas dataframe :projection: A dictionary with", "fields to load from database :return: A pandas dataframe with the data \"\"\"", "dict) -> pd.DataFrame: \"\"\" Load the data from the Mongo collection and transform", "import pandas as pd from news_classifier.database import db def load_data(projection: dict) -> pd.DataFrame:", "\"\"\" Load the data from the Mongo collection and transform into a pandas" ]
[ "# matrix of type uint8 called 'LinkeTurbidity'. The rows represent global # latitudes", "beam, global, and diffuse components. Reference [2]_ provides comparisons with other clear sky", "add previous Dec and next Jan # to the array so that the", "several methods to calculate clear sky GHI, DNI, and DHI. \"\"\" from __future__", "needed in each # function i0p = _calc_i0p(dni_extra, w, aod700, p) taub =", "0.0148*aod700 - 0.0172 b0 = -0.7565*aod700**2 + 0.5057*aod700 + 0.4557 b = b1", "have the best performance of models which require only zenith angle [3]. Extreme", "to have the best performance of models which require only zenith angle [3].", "ghi = (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl - 1))) * np.exp(0.01*airmass_absolute**1.8)) # use fmax to", "water of the atmosphere (cm). Algorithm derived for values between 0.2 and 10", "* Linke Turbidity, # so divide the number from the file by 20", "== 1: taud = taud[0] return taud def _calc_d(w, aod700, p): \"\"\"Calculate the", "p) taub = _calc_taub(w, aod700, p) b = _calc_b(w, aod700) taug = _calc_taug(w,", "This # is accomplished by judicious use and placement of np.maximum, # np.minimum,", "15, 20, and 18 W/m^2 for the beam, global, and diffuse components. Reference", "use clearsky.ineichen if you ' + 'supply your own turbidities.') if filepath is", "[3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear Sky Models: Implementation and", "covers 1 year. # Assume that data corresponds to the value at #", "if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def lookup_linke_turbidity(time, latitude, longitude, filepath=None,", "fast already, but it could be made # faster by precalculating the powers", "156 publication (notably the fh2-(TL-1) should be fh2 * # (TL-1)). 
# The", "* np.fmin(np.fmax(bnci_2, 0), 1e20) dni = np.minimum(bnci, bnci_2) dhi = ghi - dni*cos_zenith", "* np.exp(-taub/sin_elev**b) ghi = i0p * np.exp(-taug/sin_elev**g) * sin_elev dhi = i0p *", "tg0 + tgp*np.log(p/p0) return taug def _calc_g(w, aod700): \"\"\"Calculate the g coefficient.\"\"\" g", "---------- [1] <NAME> and <NAME>, \"A New airmass independent formulation for the Linke", "the fh2-(TL-1) should be fh2 * # (TL-1)). # The NaN handling is", "this result! Parameters ---------- apparent_zenith : Series The apparent (refraction corrected) sun zenith", "return g def _calc_taud(w, aod700, p): \"\"\"Calculate the taud coefficient.\"\"\" # isscalar tests", "model. Initial implementation of this algorithm by <NAME>. References ---------- [1] <NAME>, \"Insolation", "a position on the Earth's surface for a given month do the #", "np.exp(-taub/sin_elev**b) ghi = i0p * np.exp(-taug/sin_elev**g) * sin_elev dhi = i0p * np.exp(-taud/sin_elev**d)", "\"Insolation in Relation to Cloudiness and Cloud Density,\" Journal of Meteorology, vol. 2,", "np.fmax(bnci, 0) # \"empirical correction\" SE 73, 157 & SE 73, 312. bnci_2", "appear to use the new turbidity factor (item # 2 above) in either", "= np.concatenate([[g[-1]], g, [g[0]]]) days = np.linspace(-15, 380, num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear, days,", "the corrected TL for TL < 2 # TLcorr = TL; # TLcorr(TL", "filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity'] latitude_index =", "We used the # equation from pg 311 because of the existence of", ": string The path to the ``.mat`` file. interp_turbidity : bool If ``True``,", "``clearsky`` module contains several methods to calculate clear sky GHI, DNI, and DHI.", "the taub coefficient\"\"\" p0 = 101325. 
tb1 = 1.82 + 0.056*np.log(w) + 0.0071*np.log(w)**2", "0.33 + 0.045*np.log(w) + 0.0096*np.log(w)**2 tbp = 0.0089*w + 0.13 taub = tb1*aod700", "The ``clearsky`` module contains several methods to calculate clear sky GHI, DNI, and", "7000 m, or 101325 and 41000 Pascals. dni_extra: numeric Extraterrestrial irradiance. The units", "input is likely to # have NaNs that we'll want to map to", "tg1*aod700 + tg0 + tgp*np.log(p/p0) return taug def _calc_g(w, aod700): \"\"\"Calculate the g", "in eqn 9 and appendix A, and 3) Global horizontal model in #", "+ 0.2846*aod700 + 0.3798 return g def _calc_taud(w, aod700, p): \"\"\"Calculate the taud", "al., \"A New Operational Model for Satellite-Derived Irradiances: Description and Validation\", Solar Energy,", "to 0s. multiply and divide by tl to # reinsert tl nans ghi", "if interp_turbidity: # Data covers 1 year. # Assume that data corresponds to", "- dni*cos_zenith irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi'] =", "of models which require only zenith angle [3]. Extreme care should be taken", "b1 * np.log(w) + b0 return b def _calc_taug(w, aod700, p): \"\"\"Calculate the", "vol. 2, pp. 154-166, 1945. [2] <NAME>, \"Insolation in Relation to Cloud Type,\"", "Linke Turbidity. altitude: numeric Altitude above sea level in meters. dni_extra: numeric Extraterrestrial", "0.5057*aod700 + 0.4557 b = b1 * np.log(w) + b0 return b def", "Laboratories, SAND2012-2389, 2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17, 2012). [5] <NAME>, et. al.,", "World Congress, June 2003. Goteborg, Sweden. ''' # Dan's note on the TL", "* np.exp(-0.09 * airmass_absolute * (tl - 1)) bnci = dni_extra * np.fmax(bnci,", "lookup_linke_turbidity(time, latitude, longitude, filepath=None, interp_turbidity=True): \"\"\" Look up the Linke Turibidity from the", "the beam, global, and diffuse components. 
Reference [2]_ provides comparisons with other clear", "sea level and 7000 m, or 101325 and 41000 Pascals. dni_extra: numeric Extraterrestrial", "0) tl = linke_turbidity fh1 = np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.) cg1 = 5.09e-05", "the publication # on pages 151-157, Ineichen and Perez introduce (among other #", "models. The phrasing of # appendix A seems as if there are two", "nans ghi = cg1 * dni_extra * cos_zenith * tl / tl *", "pressure w = precipitable_water # algorithm fails for pw < 0.2 if np.isscalar(w):", "units of the output. Returns ------- clearsky : DataFrame (if Series input) or", "bool If ``True``, interpolates the monthly Linke turbidity values found in ``LinkeTurbidities.mat`` to", "def _calc_d(w, aod700, p): \"\"\"Calculate the d coefficient.\"\"\" p0 = 101325. dp =", "73, pg 311. Full ref: Perez # et. al., Vol. 73, pp. 307-317", "beam/GHI models, and the # second correction is used to correct the revised", "clearsky.ineichen if you ' + 'supply your own turbidities.') if filepath is None:", "np.exp(-altitude/1250.) cg1 = 5.09e-05 * altitude + 0.868 cg2 = 3.92e-05 * altitude", "isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def lookup_linke_turbidity(time, latitude, longitude, filepath=None, interp_turbidity=True):", "about matching the output type to the input type(s) if len(taud) == 1:", "sun zenith angle in degrees. Returns ------- pd.Series The modeled global horizonal irradiance", "and <NAME>, \"Global Horizontal Irradiance Clear Sky Models: Implementation and Analysis\", Sandia National", "linke_turbidity def haurwitz(apparent_zenith): ''' Determine clear sky GHI from Haurwitz model. Implements the", "for values between 0 and 0.45. 
precipitable_water: numeric The precipitable water of the", "cg2 = 3.92e-05 * altitude + 0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl -", "that estimate the clear sky global and beam solar irradiance,\" Solar Energy, 132,", "coefficient.\"\"\" g = -0.0147*np.log(w) - 0.3079*aod700**2 + 0.2846*aod700 + 0.3798 return g def", "b0 return b def _calc_taug(w, aod700, p): \"\"\"Calculate the taug coefficient\"\"\" p0 =", "# use max so that nighttime values will result in 0s instead of", "on clear sky models found the Haurwitz model to have the best performance", "index=time) else: linke_turbidity = pd.DataFrame(time.month, index=time) # apply monthly data linke_turbidity = linke_turbidity.apply(lambda", "index=time) # apply monthly data linke_turbidity = linke_turbidity.apply(lambda x: g[x[0]-1], axis=1) linke_turbidity /=", "numpy as np import pandas as pd from pvlib import tools def ineichen(apparent_zenith,", "to the value at # the middle of each month. # This means", "to correct the # turbidity factor used in the beam/GHI models. # Create", "inputmin outputrange = outputmax - outputmin outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange + outputmin", "fmax to map airmass nans to 0s. multiply and divide by tl to", "sky GHI, DNI, and DHI from Ineichen/Perez model. Implements the Ineichen and Perez", "the simplified Solis model [1]_. Reference [1]_ describes the accuracy of the model", "return outputmatrix def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\" Calculate the clear sky", "simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\" Calculate the clear sky GHI, DNI, and", "of the sun above the horizon (deg). aod700: numeric The aerosol optical depth", "= _calc_b(w, aod700) taug = _calc_taug(w, aod700, p) g = _calc_g(w, aod700) taud", "Pressure corrected airmass. linke_turbidity: numeric Linke Turbidity. 
altitude: numeric Altitude above sea level", "+ tg0 + tgp*np.log(p/p0) return taug def _calc_g(w, aod700): \"\"\"Calculate the g coefficient.\"\"\"", "by SoDa [4, 5]. Parameters ----------- apparent_zenith: numeric Refraction corrected solar zenith angle", "model to have excellent performance with a minimal input data set [3]. Default", "http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17, 2012). [5] <NAME>, et. al., \"Worldwide Linke Turbidity Information\",", "cos_zenith * tl / tl * np.fmax(ghi, 0) # BncI = \"normal beam", "matrix of type uint8 called 'LinkeTurbidity'. The rows represent global # latitudes from", "aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\" Calculate the clear sky GHI, DNI, and DHI", "np.fmax # use max so that nighttime values will result in 0s instead", "_calc_taub(w, aod700, p): \"\"\"Calculate the taub coefficient\"\"\" p0 = 101325. tb1 = 1.82", "tds[5]*np.log(p/p0)) # be polite about matching the output type to the input type(s)", "clearsky_ghi}) return df_out def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax): \"\"\" used by linke", "calculates the clear-sky diffuse horizontal (DHI) component as the difference between GHI and", "turbidity : Series \"\"\" # The .mat file 'LinkeTurbidities.mat' contains a single 2160", "``.mat`` file. interp_turbidity : bool If ``True``, interpolates the monthly Linke turbidity values", "linke_turbidity: numeric Linke Turbidity. altitude: numeric Altitude above sea level in meters. dni_extra:", "Solar Energy 73, pg 156. We used the # equation from pg 311", "* np.exp(-taug/sin_elev**g) * sin_elev dhi = i0p * np.exp(-taud/sin_elev**d) irrads = OrderedDict() irrads['ghi']", "<NAME>, et. al., \"Worldwide Linke Turbidity Information\", Proc. 
ISES Solar World Congress, June", "i0p = i0 * (i02*aod700**2 + i01*aod700 + io0 + 0.071*np.log(p/p0)) return i0p", "# apply monthly data linke_turbidity = linke_turbidity.apply(lambda x: g[x[0]-1], axis=1) linke_turbidity /= 20.", "np.linspace(-15, 380, num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2), index=time) else: linke_turbidity = pd.DataFrame(time.month,", "of np.maximum, # np.minimum, and np.fmax # use max so that nighttime values", "w**0.56 i0p = i0 * (i02*aod700**2 + i01*aod700 + io0 + 0.071*np.log(p/p0)) return", "- 8.86, 0.0554*w - 5.71 td0 = 0.0042*w + 3.12, 0.0057*w + 2.94", "June 2003. Goteborg, Sweden. ''' # Dan's note on the TL correction: By", "# (TL-1)). # The NaN handling is a little subtle. The AM input", "+ tds[5]*np.log(p/p0)) # be polite about matching the output type to the input", "or OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. See also", "max so that nighttime values will result in 0s instead of # negatives.", "called 'LinkeTurbidity'. The rows represent global # latitudes from 90 to -90 degrees;", "meters. dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine the units of", "values for monthly Linke turbidity provided by SoDa [4, 5]. Parameters ----------- apparent_zenith:", "0.0554*w - 5.71 td0 = 0.0042*w + 3.12, 0.0057*w + 2.94 tdp =", "the # turbidity. try: import scipy.io except ImportError: raise ImportError('The Linke turbidity lookup", "import tools def ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.): ''' Determine clear sky GHI,", "0] = 0 df_out = pd.DataFrame({'ghi': clearsky_ghi}) return df_out def _linearly_scale(inputmatrix, inputmin, inputmax,", "used to correct the revised turibidity # factor. 
In my estimation, there is", "# reinsert tl nans ghi = cg1 * dni_extra * cos_zenith * tl", "Irradiance Clear Sky Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. '''", "output. This # is accomplished by judicious use and placement of np.maximum, #", "in the interpretation of this result! Parameters ---------- apparent_zenith : Series The apparent", "However, we # want NaNs in other inputs to propagate through to the", "coefficient.\"\"\" p0 = 101325. dp = 1/(18 + 152*aod700) d = -0.337*aod700**2 +", "return i0p def _calc_taub(w, aod700, p): \"\"\"Calculate the taub coefficient\"\"\" p0 = 101325.", "g = linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: # Data covers 1 year. # Assume that", "in other inputs to propagate through to the output. This # is accomplished", "= (np.array([td0, td1, td2, td3, td4, tdp]) * aod700_mask).sum(axis=1) p0 = 101325. taud", "Sky Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained", "<NAME>, \"Insolation in Relation to Cloud Type,\" Journal of Meteorology, vol. 3, pp.", "The precipitable water of the atmosphere (cm). Algorithm derived for values between 0.2", "NaN handling is a little subtle. The AM input is likely to #", "= aod700 < 0.05 aod700_mask = np.array([aod700_mask, ~aod700_mask], dtype=np.int) # create tuples of", "of ``dni_extra`` determine the units of the output. Returns ------- clearsky : DataFrame", "= ghi * np.fmin(np.fmax(bnci_2, 0), 1e20) dni = np.minimum(bnci, bnci_2) dhi = ghi", "3) Global horizontal model in # eqn. 11. They do NOT appear to", "0s # it's also friendly to scalar and series inputs sin_elev = np.maximum(1.e-30,", "= 101325. tg1 = 1.24 + 0.047*np.log(w) + 0.0061*np.log(w)**2 tg0 = 0.27 +", "of type uint8 called 'LinkeTurbidity'. 
The rows represent global # latitudes from 90", "-0.134*w + 15.5 td1 = 0.092*w - 8.86, 0.0554*w - 5.71 td0 =", "aod700, p): \"\"\"Calculate the d coefficient.\"\"\" p0 = 101325. dp = 1/(18 +", "Turibidity from the ``LinkeTurbidities.mat`` data file supplied with pvlib. Parameters ---------- time :", "tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds = (np.array([td0, td1, td2, td3, td4, tdp]) *", "dhi = ghi - dni*cos_zenith irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] =", "\"\"\" used by linke turbidity lookup function \"\"\" inputrange = inputmax - inputmin", "+ tgp*np.log(p/p0) return taug def _calc_g(w, aod700): \"\"\"Calculate the g coefficient.\"\"\" g =", "(GHI), direct normal irradiance (DNI), and calculates the clear-sky diffuse horizontal (DHI) component", "This equation is found in Solar Energy 73, pg 311. Full ref: Perez", "OrderedDict import numpy as np import pandas as pd from pvlib import tools", "pressure: numeric The atmospheric pressure (Pascals). Algorithm derived for altitudes between sea level", "data corresponds to the value at # the middle of each month. #", "the log(p/p0), and # the log(w) instead of repeating the calculations as needed", "np.sin(np.radians(apparent_elevation))) dni = i0p * np.exp(-taub/sin_elev**b) ghi = i0p * np.exp(-taug/sin_elev**g) * sin_elev", "between 0.2 and 10 cm. Values less than 0.2 will be assumed to", "= \"normal beam clear sky radiation\" b = 0.664 + 0.163/fh1 bnci =", "np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0] = 0 df_out = pd.DataFrame({'ghi': clearsky_ghi}) return df_out def", "for pw < 0.2 if np.isscalar(w): w = 0.2 if w < 0.2", "= _calc_i0p(dni_extra, w, aod700, p) taub = _calc_taub(w, aod700, p) b = _calc_b(w,", "# TLcorr(TL < 2) = TLcorr(TL < 2) - 0.25 .* (2-TLcorr(TL <", "+ 0.4557 b = b1 * np.log(w) + b0 return b def _calc_taug(w,", "equal to 0.2. pressure: numeric The atmospheric pressure (Pascals). Algorithm derived for altitudes", "the day of year value. 
# This is approximate and could be made", "by tl to # reinsert tl nans ghi = cg1 * dni_extra *", "Haurwitz clear sky model for global horizontal irradiance (GHI) as presented in [1,", "clearsky_ghi[clearsky_ghi < 0] = 0 df_out = pd.DataFrame({'ghi': clearsky_ghi}) return df_out def _linearly_scale(inputmatrix,", "taug def _calc_g(w, aod700): \"\"\"Calculate the g coefficient.\"\"\" g = -0.0147*np.log(w) - 0.3079*aod700**2", "to do this. if np.isscalar(w) and np.isscalar(aod700): w = np.array([w]) aod700 = np.array([aod700])", "set [3]. Default values for monthly Linke turbidity provided by SoDa [4, 5].", "0.45. precipitable_water: numeric The precipitable water of the atmosphere (cm). Algorithm derived for", "presented in [1, 2]. A report on clear sky models found the Haurwitz", "angle in degrees. airmass_absolute: numeric Pressure corrected airmass. linke_turbidity: numeric Linke Turbidity. altitude:", "ghi = i0p * np.exp(-taug/sin_elev**g) * sin_elev dhi = i0p * np.exp(-taud/sin_elev**d) irrads", "sin_elev dhi = i0p * np.exp(-taud/sin_elev**d) irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni']", "reasonably fast already, but it could be made # faster by precalculating the", "DHI from Ineichen/Perez model. Implements the Ineichen and Perez clear sky model for", "\"\"\" inputrange = inputmax - inputmin outputrange = outputmax - outputmin outputmatrix =", "haurwitz(apparent_zenith): ''' Determine clear sky GHI from Haurwitz model. Implements the Haurwitz clear", "# turbidity factor used in the beam/GHI models. # Create the corrected TL", "np.isscalar(w) and np.isscalar(aod700): w = np.array([w]) aod700 = np.array([aod700]) elif np.isscalar(w): w =", "for global horizontal irradiance (GHI), direct normal irradiance (DNI), and calculates the clear-sky", "-0.23*w + 74.8, -0.134*w + 15.5 td1 = 0.092*w - 8.86, 0.0554*w -", "# the middle of each month. 
# This means that we need to", "+ 11.6 td3 = -3.11*w + 79.4, 0.27*w - 20.7 td2 = -0.23*w", "# aod700 < 0.05, aod700 >= 0.05 td4 = 86*w - 13800, -0.21*w", "(0.5); # This equation is found in Solar Energy 73, pg 311. Full", "and next Jan # to the array so that the interpolation will work", "# on pages 151-157, Ineichen and Perez introduce (among other # things) three", "sky models found the Haurwitz model to have the best performance of models", "taub def _calc_b(w, aod700): \"\"\"Calculate the b coefficient.\"\"\" b1 = 0.00925*aod700**2 + 0.0148*aod700", "(obtained July 17, 2012). [5] <NAME>, et. al., \"Worldwide Linke Turbidity Information\", Proc.", "ImportError: raise ImportError('The Linke turbidity lookup table requires scipy. ' + 'You can", "Dan's note on the TL correction: By my reading of the publication #", "corrected) sun zenith angle in degrees. Returns ------- pd.Series The modeled global horizonal", "for a given month do the # following: LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex, month).", "''' Determine clear sky GHI, DNI, and DHI from Ineichen/Perez model. Implements the", "Haurwitz clear-sky model. Initial implementation of this algorithm by <NAME>. References ---------- [1]", ": pandas.DatetimeIndex latitude : float longitude : float filepath : string The path", "np.minimum(bnci, bnci_2) dhi = ghi - dni*cos_zenith irrads = OrderedDict() irrads['ghi'] = ghi", "Solar Energy, 82, 758-762 (2008). .. [2] <NAME>, \"Validation of models that estimate", "filepath is None: pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat =", ".* (2-TLcorr(TL < 2)) .^ (0.5); # This equation is found in Solar", "monthly data linke_turbidity = linke_turbidity.apply(lambda x: g[x[0]-1], axis=1) linke_turbidity /= 20. return linke_turbidity", "and 18 W/m^2 for the beam, global, and diffuse components. Reference [2]_ provides", "the # right shape in the tds calculation. 
# there's probably a better", "\"\"\"Calculate the taub coefficient\"\"\" p0 = 101325. tb1 = 1.82 + 0.056*np.log(w) +", "the # equation from pg 311 because of the existence of known typos", "more accurate. g2 = np.concatenate([[g[-1]], g, [g[0]]]) days = np.linspace(-15, 380, num=14) linke_turbidity", "the Solis clear sky model,\" Solar Energy, 82, 758-762 (2008). .. [2] <NAME>,", "Solar World Congress, June 2003. Goteborg, Sweden. ''' # Dan's note on the", "= 86*w - 13800, -0.21*w + 11.6 td3 = -3.11*w + 79.4, 0.27*w", "we'll want to map to 0s in the output. However, we # want", "[5] <NAME>, et. al., \"Worldwide Linke Turbidity Information\", Proc. ISES Solar World Congress,", "the interpretation of this result! Parameters ---------- apparent_zenith : Series The apparent (refraction", "2002. [2] <NAME> et. al., \"A New Operational Model for Satellite-Derived Irradiances: Description", "Jan 1 - Jan 15 and Dec 16 - Dec 31. # Then", "import os from collections import OrderedDict import numpy as np import pandas as", "\"\"\"Calculate the taud coefficient.\"\"\" # isscalar tests needed to ensure that the arrays", "according to the simplified Solis model [1]_. Reference [1]_ describes the accuracy of", "------- clearsky : DataFrame (if Series input) or OrderedDict of arrays DataFrame/OrderedDict contains", "sky model for global horizontal irradiance (GHI) as presented in [1, 2]. A", "2]. A report on clear sky models found the Haurwitz model to have", "linke_turbidity /= 20. return linke_turbidity def haurwitz(apparent_zenith): ''' Determine clear sky GHI from", "have the # right shape in the tds calculation. # there's probably a", "because of the existence of known typos in # the pg 156 publication", "Model for Satellite-Derived Irradiances: Description and Validation\", Solar Energy, vol 73, pp. 307-317,", "there are two separate corrections, the # first correction is used to correct", "(unitless). Algorithm derived for values between 0 and 0.45. 
precipitable_water: numeric The precipitable", "aod700): \"\"\"Calculate the b coefficient.\"\"\" b1 = 0.00925*aod700**2 + 0.0148*aod700 - 0.0172 b0", "3, pp. 123-124, 1946. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear", "already, but it could be made # faster by precalculating the powers of", "arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. See also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky", "To determine the Linke # turbidity for a position on the Earth's surface", "+ fh2*(tl - 1))) * np.exp(0.01*airmass_absolute**1.8)) # use fmax to map airmass nans", "= 0.2 # this algorithm is reasonably fast already, but it could be", "<NAME>, and <NAME>, \"Global Horizontal Irradiance Clear Sky Models: Implementation and Analysis\", Sandia", "by the Haurwitz clear-sky model. Initial implementation of this algorithm by <NAME>. References", "found the Ineichen/Perez model to have excellent performance with a minimal input data", "# function i0p = _calc_i0p(dni_extra, w, aod700, p) taub = _calc_taub(w, aod700, p)", "0.2 # this algorithm is reasonably fast already, but it could be made", "# This equation is found in Solar Energy 73, pg 311. Full ref:", "15 and Dec 16 - Dec 31. # Then we map the month", "tests needed to ensure that the arrays will have the # right shape", "at night instead of 0s # it's also friendly to scalar and series", "0.2 if w < 0.2 else w else: w = w.copy() w[w <", "= np.maximum(tools.cosd(apparent_zenith), 0) tl = linke_turbidity fh1 = np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.) cg1", "< 2 # TLcorr = TL; # TLcorr(TL < 2) = TLcorr(TL <", "only zenith angle [3]. Extreme care should be taken in the interpretation of", "need to correct the # turbidity factor used in the beam/GHI models. #", "in # the pg 156 publication (notably the fh2-(TL-1) should be fh2 *", "global horizonal irradiance in W/m^2 provided by the Haurwitz clear-sky model. 
Initial implementation", "Solar Energy 73, pg 311. Full ref: Perez # et. al., Vol. 73,", "p = pressure w = precipitable_water # algorithm fails for pw < 0.2", "linke_turbidity = pd.DataFrame(time.month, index=time) # apply monthly data linke_turbidity = linke_turbidity.apply(lambda x: g[x[0]-1],", "if len(taud) == 1: taud = taud[0] return taud def _calc_d(w, aod700, p):", "difference between GHI and DNI*cos(zenith) as presented in [1, 2]. A report on", "the arrays will have the # right shape in the tds calculation. #", "version of the Solis clear sky model,\" Solar Energy, 82, 758-762 (2008). ..", "aod700) aod700_mask = aod700 < 0.05 aod700_mask = np.array([aod700_mask, ~aod700_mask], dtype=np.int) # create", "modeled global horizonal irradiance in W/m^2 provided by the Haurwitz clear-sky model. Initial", "= TLcorr(TL < 2) - 0.25 .* (2-TLcorr(TL < 2)) .^ (0.5); #", "require only zenith angle [3]. Extreme care should be taken in the interpretation", "\"\"\"Calculate the taug coefficient\"\"\" p0 = 101325. tg1 = 1.24 + 0.047*np.log(w) +", "# from -180 to 180; and the depth (third dimension) represents months of", "corresponds to the value at # the middle of each month. # This", "0.664 + 0.163/fh1 bnci = b * np.exp(-0.09 * airmass_absolute * (tl -", "[g[0]]]) days = np.linspace(-15, 380, num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2), index=time) else:", "\"normal beam clear sky radiation\" b = 0.664 + 0.163/fh1 bnci = b", "file 'LinkeTurbidities.mat' contains a single 2160 x 4320 x 12 # matrix of", "p) b = _calc_b(w, aod700) taug = _calc_taug(w, aod700, p) g = _calc_g(w,", "to map airmass nans to 0s. multiply and divide by tl to #", "of aod700, the log(p/p0), and # the log(w) instead of repeating the calculations", "-3.11*w + 79.4, 0.27*w - 20.7 td2 = -0.23*w + 74.8, -0.134*w +", "73, pp. 151-157, 2002. [2] <NAME> et. 
al., \"A New Operational Model for", "clear-sky diffuse horizontal (DHI) component as the difference between GHI and DNI*cos(zenith) as", "horizon (deg). aod700: numeric The aerosol optical depth at 700 nm (unitless). Algorithm", "cm. Values less than 0.2 will be assumed to be equal to 0.2.", "0.0079*w + 0.1 taug = tg1*aod700 + tg0 + tgp*np.log(p/p0) return taug def", "_calc_taud(w, aod700, p): \"\"\"Calculate the taud coefficient.\"\"\" # isscalar tests needed to ensure", "contains the columns/keys ``'dhi', 'dni', 'ghi'``. References ---------- .. [1] <NAME>, \"A broadband", "dp = 1/(18 + 152*aod700) d = -0.337*aod700**2 + 0.63*aod700 + 0.116 +", "depth at 700 nm (unitless). Algorithm derived for values between 0 and 0.45.", "there is no need to correct the # turbidity factor used in the", "by precalculating the powers of aod700, the log(p/p0), and # the log(w) instead", "2) new turbidity # factor in eqn 9 and appendix A, and 3)", "180, 1, 4320)) .astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: # Data covers 1", "= np.full_like(w, aod700) aod700_mask = aod700 < 0.05 aod700_mask = np.array([aod700_mask, ~aod700_mask], dtype=np.int)", "model,\" Solar Energy, 82, 758-762 (2008). .. [2] <NAME>, \"Validation of models that", "Altitude above sea level in meters. dni_extra: numeric Extraterrestrial irradiance. The units of", "for # aod700 < 0.05, aod700 >= 0.05 td4 = 86*w - 13800,", "11.6 td3 = -3.11*w + 79.4, 0.27*w - 20.7 td2 = -0.23*w +", "dni*cos_zenith irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi'] = dhi", "< 0.2 if np.isscalar(w): w = 0.2 if w < 0.2 else w", "numeric The atmospheric pressure (Pascals). Algorithm derived for altitudes between sea level and", "taud coefficient.\"\"\" # isscalar tests needed to ensure that the arrays will have", "the value at # the middle of each month. # This means that", "p): \"\"\"Calculate the d coefficient.\"\"\" p0 = 101325. 
dp = 1/(18 + 152*aod700)", "instead of # negatives. propagates nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0) tl = linke_turbidity", "2)) .^ (0.5); # This equation is found in Solar Energy 73, pg", "Energy, vol 73, pp. 307-317, 2002. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal", "clear sky radiation\" b = 0.664 + 0.163/fh1 bnci = b * np.exp(-0.09", "airmass_absolute * (tl - 1)) bnci = dni_extra * np.fmax(bnci, 0) # \"empirical", "corrected airmass. linke_turbidity: numeric Linke Turbidity. altitude: numeric Altitude above sea level in", "\"enhanced extraterrestrial irradiance\".\"\"\" p0 = 101325. io0 = 1.08 * w**0.0051 i01 =", "altitude: numeric Altitude above sea level in meters. dni_extra: numeric Extraterrestrial irradiance. The", "eqn. 8, 2) new turbidity # factor in eqn 9 and appendix A,", "= -3.11*w + 79.4, 0.27*w - 20.7 td2 = -0.23*w + 74.8, -0.134*w", "Congress, June 2003. Goteborg, Sweden. ''' # Dan's note on the TL correction:", "reading of the publication # on pages 151-157, Ineichen and Perez introduce (among", "have excellent performance with a minimal input data set [3]. Default values for", "clearsky : DataFrame (if Series input) or OrderedDict of arrays DataFrame/OrderedDict contains the", "tgp = 0.0079*w + 0.1 taug = tg1*aod700 + tg0 + tgp*np.log(p/p0) return", "\"\"\" Calculate the clear sky GHI, DNI, and DHI according to the simplified", "apparent (refraction corrected) sun zenith angle in degrees. Returns ------- pd.Series The modeled", "'ghi'``. See also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References ---------- [1] <NAME> and <NAME>, \"A", "b = 0.664 + 0.163/fh1 bnci = b * np.exp(-0.09 * airmass_absolute *", "either the beam or GHI models. The phrasing of # appendix A seems", "74.8, -0.134*w + 15.5 td1 = 0.092*w - 8.86, 0.0554*w - 5.71 td0", "2003. Goteborg, Sweden. 
''' # Dan's note on the TL correction: By my", "derived for altitudes between sea level and 7000 m, or 101325 and 41000", "input) or OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. References", "irrads = pd.DataFrame.from_dict(irrads) return irrads def lookup_linke_turbidity(time, latitude, longitude, filepath=None, interp_turbidity=True): \"\"\" Look", "on the TL correction: By my reading of the publication # on pages", "in # eqn. 11. They do NOT appear to use the new turbidity", ".^ (0.5); # This equation is found in Solar Energy 73, pg 311.", "apply monthly data linke_turbidity = linke_turbidity.apply(lambda x: g[x[0]-1], axis=1) linke_turbidity /= 20. return", "my estimation, there is no need to correct the # turbidity factor used", "divide by tl to # reinsert tl nans ghi = cg1 * dni_extra", "aod700, the log(p/p0), and # the log(w) instead of repeating the calculations as", "model for global horizontal irradiance (GHI), direct normal irradiance (DNI), and calculates the", "Implements the Ineichen and Perez clear sky model for global horizontal irradiance (GHI),", "if there are two separate corrections, the # first correction is used to", "degrees; the columns represent global longitudes # from -180 to 180; and the", "\"Validation of models that estimate the clear sky global and beam solar irradiance,\"", "= 101325. dp = 1/(18 + 152*aod700) d = -0.337*aod700**2 + 0.63*aod700 +", "GHI models. The phrasing of # appendix A seems as if there are", "up the Linke Turibidity from the ``LinkeTurbidities.mat`` data file supplied with pvlib. Parameters", "-0.71*(1+aod700)**(-15.0) tds = (np.array([td0, td1, td2, td3, td4, tdp]) * aod700_mask).sum(axis=1) p0 =", "Jan # to the array so that the interpolation will work for #", "longitude : float filepath : string The path to the ``.mat`` file. 
interp_turbidity", "repeating the calculations as needed in each # function i0p = _calc_i0p(dni_extra, w,", "= ( np.around(_linearly_scale(latitude, 90, -90, 1, 2160)) .astype(np.int64)) longitude_index = ( np.around(_linearly_scale(longitude, -180,", "the tds calculation. # there's probably a better way to do this. if", "By my reading of the publication # on pages 151-157, Ineichen and Perez", "but it could be made # faster by precalculating the powers of aod700,", "estimation, there is no need to correct the # turbidity factor used in", "through to the output. This # is accomplished by judicious use and placement", "two separate corrections, the # first correction is used to correct the beam/GHI", "+ outputmin return outputmatrix def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\" Calculate the", "linke_turbidity.apply(lambda x: g[x[0]-1], axis=1) linke_turbidity /= 20. return linke_turbidity def haurwitz(apparent_zenith): ''' Determine", "diffuse components. Reference [2]_ provides comparisons with other clear sky models. Parameters ----------", "i0p * np.exp(-taug/sin_elev**g) * sin_elev dhi = i0p * np.exp(-taud/sin_elev**d) irrads = OrderedDict()", "w) elif np.isscalar(aod700): aod700 = np.full_like(w, aod700) aod700_mask = aod700 < 0.05 aod700_mask", "* w**0.0051 i01 = 0.97 * w**0.032 i02 = 0.12 * w**0.56 i0p", "(cm). Algorithm derived for values between 0.2 and 10 cm. Values less than", "5.71 td0 = 0.0042*w + 3.12, 0.0057*w + 2.94 tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0)", "-90 degrees; the columns represent global longitudes # from -180 to 180; and", "+ 79.4, 0.27*w - 20.7 td2 = -0.23*w + 74.8, -0.134*w + 15.5", "TL; # TLcorr(TL < 2) = TLcorr(TL < 2) - 0.25 .* (2-TLcorr(TL", "0.2 will be assumed to be equal to 0.2. pressure: numeric The atmospheric", "16 - Dec 31. 
# Then we map the month value to the", "io0 + 0.071*np.log(p/p0)) return i0p def _calc_taub(w, aod700, p): \"\"\"Calculate the taub coefficient\"\"\"", "irrads def _calc_i0p(i0, w, aod700, p): \"\"\"Calculate the \"enhanced extraterrestrial irradiance\".\"\"\" p0 =", "zenith angle in degrees. airmass_absolute: numeric Pressure corrected airmass. linke_turbidity: numeric Linke Turbidity.", "pvlib. Parameters ---------- time : pandas.DatetimeIndex latitude : float longitude : float filepath", "the input type(s) if len(taud) == 1: taud = taud[0] return taud def", "``'dhi', 'dni', 'ghi'``. See also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References ---------- [1] <NAME> and", "a better way to do this. if np.isscalar(w) and np.isscalar(aod700): w = np.array([w])", "model in # eqn. 11. They do NOT appear to use the new", "= _calc_taug(w, aod700, p) g = _calc_g(w, aod700) taud = _calc_taud(w, aod700, p)", "the d coefficient.\"\"\" p0 = 101325. dp = 1/(18 + 152*aod700) d =", "in ``LinkeTurbidities.mat`` to daily values. Returns ------- turbidity : Series \"\"\" # The", "model. Implements the Ineichen and Perez clear sky model for global horizontal irradiance", "0.3079*aod700**2 + 0.2846*aod700 + 0.3798 return g def _calc_taud(w, aod700, p): \"\"\"Calculate the", "latitude : float longitude : float filepath : string The path to the", "\"empirical correction\" SE 73, 157 & SE 73, 312. bnci_2 = ((1 -", "_calc_b(w, aod700) taug = _calc_taug(w, aod700, p) g = _calc_g(w, aod700) taud =", "A report on clear sky models found the Haurwitz model to have the", "is approximate and could be made more accurate. g2 = np.concatenate([[g[-1]], g, [g[0]]])", "Linke Turibidity from the ``LinkeTurbidities.mat`` data file supplied with pvlib. Parameters ---------- time", "by linke turbidity lookup function \"\"\" inputrange = inputmax - inputmin outputrange =", "tdp]) * aod700_mask).sum(axis=1) p0 = 101325. 
taud = (tds[4]*aod700**4 + tds[3]*aod700**3 + tds[2]*aod700**2", "1 year. # Assume that data corresponds to the value at # the", "irradiance. The units of ``dni_extra`` determine the units of the output. Returns -------", "are two separate corrections, the # first correction is used to correct the", "= np.minimum(bnci, bnci_2) dhi = ghi - dni*cos_zenith irrads = OrderedDict() irrads['ghi'] =", "bnci_2) dhi = ghi - dni*cos_zenith irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni']", "def _calc_i0p(i0, w, aod700, p): \"\"\"Calculate the \"enhanced extraterrestrial irradiance\".\"\"\" p0 = 101325.", "the beam/GHI models. # Create the corrected TL for TL < 2 #", "0.047*np.log(w) + 0.0061*np.log(w)**2 tg0 = 0.27 + 0.043*np.log(w) + 0.0090*np.log(w)**2 tgp = 0.0079*w", "import division import os from collections import OrderedDict import numpy as np import", "instead of 0s # it's also friendly to scalar and series inputs sin_elev", "be polite about matching the output type to the input type(s) if len(taud)", "nighttime values will result in 0s instead of # negatives. propagates nans. cos_zenith", "could be made more accurate. g2 = np.concatenate([[g[-1]], g, [g[0]]]) days = np.linspace(-15,", "(0.1 - 0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) / cos_zenith) bnci_2 = ghi * np.fmin(np.fmax(bnci_2, 0),", "np.array([aod700_mask, ~aod700_mask], dtype=np.int) # create tuples of coefficients for # aod700 < 0.05,", "irrads def lookup_linke_turbidity(time, latitude, longitude, filepath=None, interp_turbidity=True): \"\"\" Look up the Linke Turibidity", "tools def ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.): ''' Determine clear sky GHI, DNI,", "\"Insolation in Relation to Cloud Type,\" Journal of Meteorology, vol. 3, pp. 123-124,", "= i0 * (i02*aod700**2 + i01*aod700 + io0 + 0.071*np.log(p/p0)) return i0p def", "The NaN handling is a little subtle. 
The AM input is likely to", ".astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: # Data covers 1 year. # Assume", "np.concatenate([[g[-1]], g, [g[0]]]) days = np.linspace(-15, 380, num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2),", "tgp*np.log(p/p0) return taug def _calc_g(w, aod700): \"\"\"Calculate the g coefficient.\"\"\" g = -0.0147*np.log(w)", "beam or GHI models. The phrasing of # appendix A seems as if", "the atmosphere (cm). Algorithm derived for values between 0.2 and 10 cm. Values", "correction: By my reading of the publication # on pages 151-157, Ineichen and", "turbidity. try: import scipy.io except ImportError: raise ImportError('The Linke turbidity lookup table requires", "17, 2012). [5] <NAME>, et. al., \"Worldwide Linke Turbidity Information\", Proc. ISES Solar", "Energy, 82, 758-762 (2008). .. [2] <NAME>, \"Validation of models that estimate the", "data linke_turbidity = linke_turbidity.apply(lambda x: g[x[0]-1], axis=1) linke_turbidity /= 20. return linke_turbidity def", "311. Full ref: Perez # et. al., Vol. 73, pp. 307-317 (2002). It", "2, pp. 154-166, 1945. [2] <NAME>, \"Insolation in Relation to Cloud Type,\" Journal", "columns/keys ``'dhi', 'dni', 'ghi'``. References ---------- .. [1] <NAME>, \"A broadband simplified version", "of this result! Parameters ---------- apparent_zenith : Series The apparent (refraction corrected) sun", "type to the input type(s) if len(taud) == 1: taud = taud[0] return", "taud def _calc_d(w, aod700, p): \"\"\"Calculate the d coefficient.\"\"\" p0 = 101325. dp", "the columns/keys ``'dhi', 'dni', 'ghi'``. 
See also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References ---------- [1]", "inputmax - inputmin outputrange = outputmax - outputmin outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange", "likely to # have NaNs that we'll want to map to 0s in", "td3 = -3.11*w + 79.4, 0.27*w - 20.7 td2 = -0.23*w + 74.8,", "g coefficient.\"\"\" g = -0.0147*np.log(w) - 0.3079*aod700**2 + 0.2846*aod700 + 0.3798 return g", "float filepath : string The path to the ``.mat`` file. interp_turbidity : bool", "p0 = 101325. io0 = 1.08 * w**0.0051 i01 = 0.97 * w**0.032", "np.exp(-taug/sin_elev**g) * sin_elev dhi = i0p * np.exp(-taud/sin_elev**d) irrads = OrderedDict() irrads['ghi'] =", "< 0.05, aod700 >= 0.05 td4 = 86*w - 13800, -0.21*w + 11.6", "# Data covers 1 year. # Assume that data corresponds to the value", "own turbidities.') if filepath is None: pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path, 'data',", "Turbidity, # so divide the number from the file by 20 to get", "horizonal irradiance in W/m^2 provided by the Haurwitz clear-sky model. Initial implementation of", "for altitudes between sea level and 7000 m, or 101325 and 41000 Pascals.", "w**0.0051 i01 = 0.97 * w**0.032 i02 = 0.12 * w**0.56 i0p =", "do this. if np.isscalar(w) and np.isscalar(aod700): w = np.array([w]) aod700 = np.array([aod700]) elif", "try: import scipy.io except ImportError: raise ImportError('The Linke turbidity lookup table requires scipy.", "National Laboratories, SAND2012-2389, 2012. ''' cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi = 1098.0 * cos_zenith", "ref: Perez # et. al., Vol. 73, pp. 307-317 (2002). It is slightly", "corrected TL for TL < 2 # TLcorr = TL; # TLcorr(TL <", "DNI, and DHI according to the simplified Solis model [1]_. 
Reference [1]_ describes", "use and placement of np.maximum, # np.minimum, and np.fmax # use max so", "aod700 < 0.05, aod700 >= 0.05 td4 = 86*w - 13800, -0.21*w +", "the creation of nans at night instead of 0s # it's also friendly", "np.around(_linearly_scale(longitude, -180, 180, 1, 4320)) .astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: # Data", "days, g2), index=time) else: linke_turbidity = pd.DataFrame(time.month, index=time) # apply monthly data linke_turbidity", "above) in either the beam or GHI models. The phrasing of # appendix", "and the # second correction is used to correct the revised turibidity #", "turbidity factor (item # 2 above) in either the beam or GHI models.", "represents months of # the year from January (1) to December (12). To", "(inputmatrix-inputmin) * outputrange/inputrange + outputmin return outputmatrix def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.):", "altitude=0, dni_extra=1364.): ''' Determine clear sky GHI, DNI, and DHI from Ineichen/Perez model.", "horizontal irradiance (GHI), direct normal irradiance (DNI), and calculates the clear-sky diffuse horizontal", "sky global and beam solar irradiance,\" Solar Energy, 132, 332-344 (2016). \"\"\" p", "and 3) Global horizontal model in # eqn. 11. They do NOT appear", "b coefficient.\"\"\" b1 = 0.00925*aod700**2 + 0.0148*aod700 - 0.0172 b0 = -0.7565*aod700**2 +", "as np import pandas as pd from pvlib import tools def ineichen(apparent_zenith, airmass_absolute,", "taud[0] return taud def _calc_d(w, aod700, p): \"\"\"Calculate the d coefficient.\"\"\" p0 =", "the g coefficient.\"\"\" g = -0.0147*np.log(w) - 0.3079*aod700**2 + 0.2846*aod700 + 0.3798 return", "be equal to 0.2. pressure: numeric The atmospheric pressure (Pascals). 
Algorithm derived for", "aod700, p) d = _calc_d(w, aod700, p) # this prevents the creation of", "( np.around(_linearly_scale(longitude, -180, 180, 1, 4320)) .astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: #", "aod700) taud = _calc_taud(w, aod700, p) d = _calc_d(w, aod700, p) # this", "ghi * np.fmin(np.fmax(bnci_2, 0), 1e20) dni = np.minimum(bnci, bnci_2) dhi = ghi -", "the output type to the input type(s) if len(taud) == 1: taud =", "July 17, 2012). [5] <NAME>, et. al., \"Worldwide Linke Turbidity Information\", Proc. ISES", "df_out = pd.DataFrame({'ghi': clearsky_ghi}) return df_out def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax): \"\"\"", "year from January (1) to December (12). To determine the Linke # turbidity", "0.0172 b0 = -0.7565*aod700**2 + 0.5057*aod700 + 0.4557 b = b1 * np.log(w)", "Linke Turbidity, # so divide the number from the file by 20 to", "inputs sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni = i0p * np.exp(-taub/sin_elev**b) ghi = i0p", "coefficient.\"\"\" b1 = 0.00925*aod700**2 + 0.0148*aod700 - 0.0172 b0 = -0.7565*aod700**2 + 0.5057*aod700", "that we need to add previous Dec and next Jan # to the", "series inputs sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni = i0p * np.exp(-taub/sin_elev**b) ghi =", "41000 Pascals. dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine the units", "that the interpolation will work for # Jan 1 - Jan 15 and", "scipy.io except ImportError: raise ImportError('The Linke turbidity lookup table requires scipy. 
' +", "def _calc_g(w, aod700): \"\"\"Calculate the g coefficient.\"\"\" g = -0.0147*np.log(w) - 0.3079*aod700**2 +", "= (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl - 1))) * np.exp(0.01*airmass_absolute**1.8)) # use fmax to map", "bnci = dni_extra * np.fmax(bnci, 0) # \"empirical correction\" SE 73, 157 &", "+ 0.056*np.log(w) + 0.0071*np.log(w)**2 tb0 = 0.33 + 0.045*np.log(w) + 0.0096*np.log(w)**2 tbp =", "Earth's surface for a given month do the # following: LT = LinkeTurbidity(LatitudeIndex,", "Ineichen/Perez model to have excellent performance with a minimal input data set [3].", "Global horizontal model in # eqn. 11. They do NOT appear to use", "# second correction is used to correct the revised turibidity # factor. In", "pandas as pd from pvlib import tools def ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.):", "np.exp(-taud/sin_elev**d) irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi'] = dhi", "Linke turbidity lookup table requires scipy. ' + 'You can still use clearsky.ineichen", "ghi irrads['dni'] = dni irrads['dhi'] = dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads)", "input type(s) if len(taud) == 1: taud = taud[0] return taud def _calc_d(w,", "models. # Create the corrected TL for TL < 2 # TLcorr =", "def lookup_linke_turbidity(time, latitude, longitude, filepath=None, interp_turbidity=True): \"\"\" Look up the Linke Turibidity from", "and could be made more accurate. g2 = np.concatenate([[g[-1]], g, [g[0]]]) days =", "[2] <NAME>, \"Insolation in Relation to Cloud Type,\" Journal of Meteorology, vol. 3,", "is a little subtle. The AM input is likely to # have NaNs", "os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity'] latitude_index", "irradiance,\" Solar Energy, 132, 332-344 (2016). 
\"\"\" p = pressure w = precipitable_water", "0.882/fh1)) / cos_zenith) bnci_2 = ghi * np.fmin(np.fmax(bnci_2, 0), 1e20) dni = np.minimum(bnci,", "* np.fmax(bnci, 0) # \"empirical correction\" SE 73, 157 & SE 73, 312.", "values between 0 and 0.45. precipitable_water: numeric The precipitable water of the atmosphere", "outputrange = outputmax - outputmin outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange + outputmin return", "phrasing of # appendix A seems as if there are two separate corrections,", "nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0) tl = linke_turbidity fh1 = np.exp(-altitude/8000.) fh2 =", "1)) bnci = dni_extra * np.fmax(bnci, 0) # \"empirical correction\" SE 73, 157", "approximate and could be made more accurate. g2 = np.concatenate([[g[-1]], g, [g[0]]]) days", "= linke_turbidity fh1 = np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.) cg1 = 5.09e-05 * altitude", "pg 156. We used the # equation from pg 311 because of the", "used the # equation from pg 311 because of the existence of known", "as needed in each # function i0p = _calc_i0p(dni_extra, w, aod700, p) taub", "-180, 180, 1, 4320)) .astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: # Data covers", "correction is used to correct the revised turibidity # factor. In my estimation,", "to # reinsert tl nans ghi = cg1 * dni_extra * cos_zenith *", "component as the difference between GHI and DNI*cos(zenith) as presented in [1, 2].", "dni_extra * np.fmax(bnci, 0) # \"empirical correction\" SE 73, 157 & SE 73,", "time : pandas.DatetimeIndex latitude : float longitude : float filepath : string The", "rows represent global # latitudes from 90 to -90 degrees; the columns represent", "1098.0 * cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0] = 0 df_out = pd.DataFrame({'ghi':", "Meteorology, vol. 2, pp. 154-166, 1945. 
[2] <NAME>, \"Insolation in Relation to Cloud", "outputrange/inputrange + outputmin return outputmatrix def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\" Calculate", "Energy, 132, 332-344 (2016). \"\"\" p = pressure w = precipitable_water # algorithm", "need to add previous Dec and next Jan # to the array so", "* aod700_mask).sum(axis=1) p0 = 101325. taud = (tds[4]*aod700**4 + tds[3]*aod700**3 + tds[2]*aod700**2 +", "A, and 3) Global horizontal model in # eqn. 11. They do NOT", "irrads = pd.DataFrame.from_dict(irrads) return irrads def _calc_i0p(i0, w, aod700, p): \"\"\"Calculate the \"enhanced", "w[w < 0.2] = 0.2 # this algorithm is reasonably fast already, but", "& SE 73, 312. bnci_2 = ((1 - (0.1 - 0.2*np.exp(-tl))/(0.1 + 0.882/fh1))", "= pd.DataFrame({'ghi': clearsky_ghi}) return df_out def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax): \"\"\" used", "w = 0.2 if w < 0.2 else w else: w = w.copy()", "* # (TL-1)). # The NaN handling is a little subtle. The AM", "TLcorr(TL < 2) = TLcorr(TL < 2) - 0.25 .* (2-TLcorr(TL < 2))", "Algorithm derived for values between 0 and 0.45. precipitable_water: numeric The precipitable water", "0.056*np.log(w) + 0.0071*np.log(w)**2 tb0 = 0.33 + 0.045*np.log(w) + 0.0096*np.log(w)**2 tbp = 0.0089*w", "Pascals. dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine the units of", "do the # following: LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). # Note that the", "coefficient\"\"\" p0 = 101325. tb1 = 1.82 + 0.056*np.log(w) + 0.0071*np.log(w)**2 tb0 =", "2 # TLcorr = TL; # TLcorr(TL < 2) = TLcorr(TL < 2)", "the units of the output. Returns ------- clearsky : DataFrame (if Series input)", "'LinkeTurbidity'. The rows represent global # latitudes from 90 to -90 degrees; the", "Initial implementation of this algorithm by <NAME>. References ---------- [1] <NAME>, \"Insolation in", "0 and 0.45. 
precipitable_water: numeric The precipitable water of the atmosphere (cm). Algorithm", "Laboratories, SAND2012-2389, 2012. ''' cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi = 1098.0 * cos_zenith *", "nans at night instead of 0s # it's also friendly to scalar and", "- 0.3079*aod700**2 + 0.2846*aod700 + 0.3798 return g def _calc_taud(w, aod700, p): \"\"\"Calculate", "a given month do the # following: LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). #", "= TL; # TLcorr(TL < 2) = TLcorr(TL < 2) - 0.25 .*", "= dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def lookup_linke_turbidity(time, latitude,", "0.27*w - 20.7 td2 = -0.23*w + 74.8, -0.134*w + 15.5 td1 =", "used by linke turbidity lookup function \"\"\" inputrange = inputmax - inputmin outputrange", "tds = (np.array([td0, td1, td2, td3, td4, tdp]) * aod700_mask).sum(axis=1) p0 = 101325.", "(2016). \"\"\" p = pressure w = precipitable_water # algorithm fails for pw", "dimension) represents months of # the year from January (1) to December (12).", "nans to 0s. multiply and divide by tl to # reinsert tl nans", "(tl - 1)) bnci = dni_extra * np.fmax(bnci, 0) # \"empirical correction\" SE", "# Jan 1 - Jan 15 and Dec 16 - Dec 31. #", "simplified Solis model [1]_. Reference [1]_ describes the accuracy of the model as", "bnci_2 = ghi * np.fmin(np.fmax(bnci_2, 0), 1e20) dni = np.minimum(bnci, bnci_2) dhi =", "typos in # the pg 156 publication (notably the fh2-(TL-1) should be fh2", "from 90 to -90 degrees; the columns represent global longitudes # from -180", "the pg 156 publication (notably the fh2-(TL-1) should be fh2 * # (TL-1)).", "et. al., \"Worldwide Linke Turbidity Information\", Proc. ISES Solar World Congress, June 2003.", "4320)) .astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: # Data covers 1 year. 
#", "= i0p * np.exp(-taub/sin_elev**b) ghi = i0p * np.exp(-taug/sin_elev**g) * sin_elev dhi =", "precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\" Calculate the clear sky GHI, DNI, and DHI according", "_calc_taug(w, aod700, p) g = _calc_g(w, aod700) taud = _calc_taud(w, aod700, p) d", "= 0.0042*w + 3.12, 0.0057*w + 2.94 tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds =", "0s in the output. However, we # want NaNs in other inputs to", "implementation of this algorithm by <NAME>. References ---------- [1] <NAME>, \"Insolation in Relation", "contains several methods to calculate clear sky GHI, DNI, and DHI. \"\"\" from", "def _calc_taud(w, aod700, p): \"\"\"Calculate the taud coefficient.\"\"\" # isscalar tests needed to", "= os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity']", "extraterrestrial irradiance\".\"\"\" p0 = 101325. io0 = 1.08 * w**0.0051 i01 = 0.97", "will have the # right shape in the tds calculation. # there's probably", "(2002). It is slightly different # than the equation given in Solar Energy", "= _calc_g(w, aod700) taud = _calc_taud(w, aod700, p) d = _calc_d(w, aod700, p)", "Vol. 73, pp. 307-317 (2002). It is slightly different # than the equation", "Proc. ISES Solar World Congress, June 2003. Goteborg, Sweden. ''' # Dan's note", "than 0.2 will be assumed to be equal to 0.2. pressure: numeric The", "(np.array([td0, td1, td2, td3, td4, tdp]) * aod700_mask).sum(axis=1) p0 = 101325. taud =", "0s instead of # negatives. propagates nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0) tl =", "aod700_mask = np.array([aod700_mask, ~aod700_mask], dtype=np.int) # create tuples of coefficients for # aod700", "inputs to propagate through to the output. 
This # is accomplished by judicious", "+ 0.0090*np.log(w)**2 tgp = 0.0079*w + 0.1 taug = tg1*aod700 + tg0 +", "aod700 = np.array([aod700]) elif np.isscalar(w): w = np.full_like(aod700, w) elif np.isscalar(aod700): aod700 =", "import numpy as np import pandas as pd from pvlib import tools def", "0), 1e20) dni = np.minimum(bnci, bnci_2) dhi = ghi - dni*cos_zenith irrads =", "polite about matching the output type to the input type(s) if len(taud) ==", "Dec 16 - Dec 31. # Then we map the month value to", "2]. A report on clear sky models found the Ineichen/Perez model to have", "Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. ''' cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi", "LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). # Note that the numbers within the matrix", "p0 = 101325. dp = 1/(18 + 152*aod700) d = -0.337*aod700**2 + 0.63*aod700", "# isscalar tests needed to ensure that the arrays will have the #", "for a position on the Earth's surface for a given month do the", "= 101325. taud = (tds[4]*aod700**4 + tds[3]*aod700**3 + tds[2]*aod700**2 + tds[1]*aod700 + tds[0]", "<NAME> and <NAME>, \"A New airmass independent formulation for the Linke turbidity coefficient\",", "in the beam/GHI models. # Create the corrected TL for TL < 2", "the existence of known typos in # the pg 156 publication (notably the", "The units of ``dni_extra`` determine the units of the output. Returns ------- clearsky", "= LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). # Note that the numbers within the matrix are", "corrected solar zenith angle in degrees. airmass_absolute: numeric Pressure corrected airmass. 
linke_turbidity: numeric", "g, [g[0]]]) days = np.linspace(-15, 380, num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2), index=time)", "return df_out def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax): \"\"\" used by linke turbidity", "the accuracy of the model as being 15, 20, and 18 W/m^2 for", "# the pg 156 publication (notably the fh2-(TL-1) should be fh2 * #", ">= 0.05 td4 = 86*w - 13800, -0.21*w + 11.6 td3 = -3.11*w", "td2, td3, td4, tdp]) * aod700_mask).sum(axis=1) p0 = 101325. taud = (tds[4]*aod700**4 +", "(TL-1)). # The NaN handling is a little subtle. The AM input is", "numeric Linke Turbidity. altitude: numeric Altitude above sea level in meters. dni_extra: numeric", "8, 2) new turbidity # factor in eqn 9 and appendix A, and", "so that the interpolation will work for # Jan 1 - Jan 15", "linke turbidity lookup function \"\"\" inputrange = inputmax - inputmin outputrange = outputmax", "from -180 to 180; and the depth (third dimension) represents months of #", "matrix are 20 * Linke Turbidity, # so divide the number from the", "matching the output type to the input type(s) if len(taud) == 1: taud", "in [1, 2]. A report on clear sky models found the Ineichen/Perez model", "could be made # faster by precalculating the powers of aod700, the log(p/p0),", "td3, td4, tdp]) * aod700_mask).sum(axis=1) p0 = 101325. taud = (tds[4]*aod700**4 + tds[3]*aod700**3", "= inputmax - inputmin outputrange = outputmax - outputmin outputmatrix = (inputmatrix-inputmin) *", "outputmatrix def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\" Calculate the clear sky GHI,", "else: w = w.copy() w[w < 0.2] = 0.2 # this algorithm is", "p): \"\"\"Calculate the taub coefficient\"\"\" p0 = 101325. 
tb1 = 1.82 + 0.056*np.log(w)", "pd.DataFrame.from_dict(irrads) return irrads def _calc_i0p(i0, w, aod700, p): \"\"\"Calculate the \"enhanced extraterrestrial irradiance\".\"\"\"", "of Meteorology, vol. 2, pp. 154-166, 1945. [2] <NAME>, \"Insolation in Relation to", "of this algorithm by <NAME>. References ---------- [1] <NAME>, \"Insolation in Relation to", "will work for # Jan 1 - Jan 15 and Dec 16 -", "Journal of Meteorology, vol. 2, pp. 154-166, 1945. [2] <NAME>, \"Insolation in Relation", "placement of np.maximum, # np.minimum, and np.fmax # use max so that nighttime", "the log(w) instead of repeating the calculations as needed in each # function", "Linke # turbidity for a position on the Earth's surface for a given", "= linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: # Data covers 1 year. # Assume that data", "= 0.97 * w**0.032 i02 = 0.12 * w**0.56 i0p = i0 *", "# following: LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). # Note that the numbers within", "units of ``dni_extra`` determine the units of the output. Returns ------- clearsky :", "= 0.2 if w < 0.2 else w else: w = w.copy() w[w", "National Laboratories, SAND2012-2389, 2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17, 2012). [5] <NAME>, et.", "* dni_extra * cos_zenith * tl / tl * np.fmax(ghi, 0) # BncI", "= np.array([w]) aod700 = np.array([aod700]) elif np.isscalar(w): w = np.full_like(aod700, w) elif np.isscalar(aod700):", "by 20 to get the # turbidity. try: import scipy.io except ImportError: raise", "to 0.2. pressure: numeric The atmospheric pressure (Pascals). Algorithm derived for altitudes between", "<NAME>, \"A broadband simplified version of the Solis clear sky model,\" Solar Energy,", "handling is a little subtle. The AM input is likely to # have", "\"\"\"Calculate the b coefficient.\"\"\" b1 = 0.00925*aod700**2 + 0.0148*aod700 - 0.0172 b0 =", "p): \"\"\"Calculate the taug coefficient\"\"\" p0 = 101325. 
tg1 = 1.24 + 0.047*np.log(w)", "use fmax to map airmass nans to 0s. multiply and divide by tl", "''' # Dan's note on the TL correction: By my reading of the", "second correction is used to correct the revised turibidity # factor. In my", "the new turbidity factor (item # 2 above) in either the beam or", "NOT appear to use the new turbidity factor (item # 2 above) in", "* np.exp(-taud/sin_elev**d) irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi'] =", "of Meteorology, vol. 3, pp. 123-124, 1946. [3] <NAME>, <NAME>, and <NAME>, \"Global", "taug coefficient\"\"\" p0 = 101325. tg1 = 1.24 + 0.047*np.log(w) + 0.0061*np.log(w)**2 tg0", "<NAME> et. al., \"A New Operational Model for Satellite-Derived Irradiances: Description and Validation\",", "``True``, interpolates the monthly Linke turbidity values found in ``LinkeTurbidities.mat`` to daily values.", "+ 74.8, -0.134*w + 15.5 td1 = 0.092*w - 8.86, 0.0554*w - 5.71", "algorithm fails for pw < 0.2 if np.isscalar(w): w = 0.2 if w", "this. if np.isscalar(w) and np.isscalar(aod700): w = np.array([w]) aod700 = np.array([aod700]) elif np.isscalar(w):", "this algorithm is reasonably fast already, but it could be made # faster", "p) g = _calc_g(w, aod700) taud = _calc_taud(w, aod700, p) d = _calc_d(w,", "# equation from pg 311 because of the existence of known typos in", "previous Dec and next Jan # to the array so that the interpolation", "the columns/keys ``'dhi', 'dni', 'ghi'``. References ---------- .. [1] <NAME>, \"A broadband simplified", "file supplied with pvlib. Parameters ---------- time : pandas.DatetimeIndex latitude : float longitude", "means that we need to add previous Dec and next Jan # to", "------- pd.Series The modeled global horizonal irradiance in W/m^2 provided by the Haurwitz", "provided by SoDa [4, 5]. Parameters ----------- apparent_zenith: numeric Refraction corrected solar zenith", "module contains several methods to calculate clear sky GHI, DNI, and DHI. 
\"\"\"", "Series The apparent (refraction corrected) sun zenith angle in degrees. Returns ------- pd.Series", "log(p/p0), and # the log(w) instead of repeating the calculations as needed in", "to scalar and series inputs sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni = i0p *", "Perez # et. al., Vol. 73, pp. 307-317 (2002). It is slightly different", "# et. al., Vol. 73, pp. 307-317 (2002). It is slightly different #", "(i02*aod700**2 + i01*aod700 + io0 + 0.071*np.log(p/p0)) return i0p def _calc_taub(w, aod700, p):", "= 1098.0 * cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0] = 0 df_out =", "year value. # This is approximate and could be made more accurate. g2", "# BncI = \"normal beam clear sky radiation\" b = 0.664 + 0.163/fh1", "0.2] = 0.2 # this algorithm is reasonably fast already, but it could", "LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). # Note that the numbers within the matrix are 20", "TL for TL < 2 # TLcorr = TL; # TLcorr(TL < 2)", "apparent_zenith: numeric Refraction corrected solar zenith angle in degrees. airmass_absolute: numeric Pressure corrected", ": Series \"\"\" # The .mat file 'LinkeTurbidities.mat' contains a single 2160 x", "things) three things. 1) Beam model in eqn. 8, 2) new turbidity #", "determine the units of the output. Returns ------- clearsky : DataFrame (if Series", "coefficient\", Solar Energy, vol 73, pp. 151-157, 2002. [2] <NAME> et. al., \"A", "A seems as if there are two separate corrections, the # first correction", "other inputs to propagate through to the output. This # is accomplished by", "w = w.copy() w[w < 0.2] = 0.2 # this algorithm is reasonably", "global, and diffuse components. 
Reference [2]_ provides comparisons with other clear sky models.", "np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni = i0p * np.exp(-taub/sin_elev**b) ghi = i0p * np.exp(-taug/sin_elev**g) *", "= dni_extra * np.fmax(bnci, 0) # \"empirical correction\" SE 73, 157 & SE", "optical depth at 700 nm (unitless). Algorithm derived for values between 0 and", "# Assume that data corresponds to the value at # the middle of", "use the new turbidity factor (item # 2 above) in either the beam", "Linke turbidity values found in ``LinkeTurbidities.mat`` to daily values. Returns ------- turbidity :", "+ 0.0071*np.log(w)**2 tb0 = 0.33 + 0.045*np.log(w) + 0.0096*np.log(w)**2 tbp = 0.0089*w +", "import pandas as pd from pvlib import tools def ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0,", "within the matrix are 20 * Linke Turbidity, # so divide the number", "np import pandas as pd from pvlib import tools def ineichen(apparent_zenith, airmass_absolute, linke_turbidity,", "and divide by tl to # reinsert tl nans ghi = cg1 *", "three things. 1) Beam model in eqn. 8, 2) new turbidity # factor", "models. Parameters ---------- apparent_elevation: numeric The apparent elevation of the sun above the", "the beam or GHI models. The phrasing of # appendix A seems as", "Create the corrected TL for TL < 2 # TLcorr = TL; #", "m, or 101325 and 41000 Pascals. dni_extra: numeric Extraterrestrial irradiance. The units of", "Determine clear sky GHI from Haurwitz model. Implements the Haurwitz clear sky model", "Values less than 0.2 will be assumed to be equal to 0.2. pressure:", "dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def lookup_linke_turbidity(time, latitude, longitude,", "dni = i0p * np.exp(-taub/sin_elev**b) ghi = i0p * np.exp(-taug/sin_elev**g) * sin_elev dhi", "elif np.isscalar(w): w = np.full_like(aod700, w) elif np.isscalar(aod700): aod700 = np.full_like(w, aod700) aod700_mask", "Solar Energy, vol 73, pp. 
307-317, 2002. [3] <NAME>, <NAME>, and <NAME>, \"Global", "on pages 151-157, Ineichen and Perez introduce (among other # things) three things.", "It is slightly different # than the equation given in Solar Energy 73,", "also friendly to scalar and series inputs sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni =", "73, pp. 307-317, 2002. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear", "filepath=None, interp_turbidity=True): \"\"\" Look up the Linke Turibidity from the ``LinkeTurbidities.mat`` data file", "Irradiances: Description and Validation\", Solar Energy, vol 73, pp. 307-317, 2002. [3] <NAME>,", "model to have the best performance of models which require only zenith angle", "irradiance\".\"\"\" p0 = 101325. io0 = 1.08 * w**0.0051 i01 = 0.97 *", "is used to correct the beam/GHI models, and the # second correction is", "work for # Jan 1 - Jan 15 and Dec 16 - Dec", "# turbidity for a position on the Earth's surface for a given month", "def _calc_b(w, aod700): \"\"\"Calculate the b coefficient.\"\"\" b1 = 0.00925*aod700**2 + 0.0148*aod700 -", "Ineichen/Perez model. Implements the Ineichen and Perez clear sky model for global horizontal", "to add previous Dec and next Jan # to the array so that", "---------- apparent_elevation: numeric The apparent elevation of the sun above the horizon (deg).", "td0 = 0.0042*w + 3.12, 0.0057*w + 2.94 tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds", "2012). [5] <NAME>, et. al., \"Worldwide Linke Turbidity Information\", Proc. ISES Solar World", "of known typos in # the pg 156 publication (notably the fh2-(TL-1) should", "* np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0] = 0 df_out = pd.DataFrame({'ghi': clearsky_ghi}) return df_out", "equation is found in Solar Energy 73, pg 311. 
Full ref: Perez #", "the TL correction: By my reading of the publication # on pages 151-157,", "tg1 = 1.24 + 0.047*np.log(w) + 0.0061*np.log(w)**2 tg0 = 0.27 + 0.043*np.log(w) +", "be taken in the interpretation of this result! Parameters ---------- apparent_zenith : Series", "i0 * (i02*aod700**2 + i01*aod700 + io0 + 0.071*np.log(p/p0)) return i0p def _calc_taub(w,", "101325. io0 = 1.08 * w**0.0051 i01 = 0.97 * w**0.032 i02 =", "numeric Pressure corrected airmass. linke_turbidity: numeric Linke Turbidity. altitude: numeric Altitude above sea", "2) = TLcorr(TL < 2) - 0.25 .* (2-TLcorr(TL < 2)) .^ (0.5);", "tb0 + tbp*np.log(p/p0) return taub def _calc_b(w, aod700): \"\"\"Calculate the b coefficient.\"\"\" b1", "Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. ''' cos_zenith = tools.cosd(apparent_zenith)", "* cos_zenith * tl / tl * np.fmax(ghi, 0) # BncI = \"normal", "value at # the middle of each month. # This means that we", "publication # on pages 151-157, Ineichen and Perez introduce (among other # things)", "division import os from collections import OrderedDict import numpy as np import pandas", "(item # 2 above) in either the beam or GHI models. The phrasing", "global # latitudes from 90 to -90 degrees; the columns represent global longitudes", "Algorithm derived for altitudes between sea level and 7000 m, or 101325 and", "the powers of aod700, the log(p/p0), and # the log(w) instead of repeating", "10 cm. Values less than 0.2 will be assumed to be equal to", "<NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear Sky Models: Implementation and Analysis\",", "is slightly different # than the equation given in Solar Energy 73, pg", "year. # Assume that data corresponds to the value at # the middle", "g def _calc_taud(w, aod700, p): \"\"\"Calculate the taud coefficient.\"\"\" # isscalar tests needed", "that data corresponds to the value at # the middle of each month.", "fh2-(TL-1) should be fh2 * # (TL-1)). 
# The NaN handling is a", "taug = _calc_taug(w, aod700, p) g = _calc_g(w, aod700) taud = _calc_taud(w, aod700,", "for # Jan 1 - Jan 15 and Dec 16 - Dec 31.", "map to 0s in the output. However, we # want NaNs in other", "used to correct the beam/GHI models, and the # second correction is used", "requires scipy. ' + 'You can still use clearsky.ineichen if you ' +", "pp. 307-317 (2002). It is slightly different # than the equation given in", "clear-sky model. Initial implementation of this algorithm by <NAME>. References ---------- [1] <NAME>,", "should be taken in the interpretation of this result! Parameters ---------- apparent_zenith :", "+ 0.043*np.log(w) + 0.0090*np.log(w)**2 tgp = 0.0079*w + 0.1 taug = tg1*aod700 +", "= os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity'] latitude_index = (", "= pd.DataFrame.from_dict(irrads) return irrads def lookup_linke_turbidity(time, latitude, longitude, filepath=None, interp_turbidity=True): \"\"\" Look up", "multiply and divide by tl to # reinsert tl nans ghi = cg1", "pd.Series The modeled global horizonal irradiance in W/m^2 provided by the Haurwitz clear-sky", "= _calc_d(w, aod700, p) # this prevents the creation of nans at night", "= 1/(18 + 152*aod700) d = -0.337*aod700**2 + 0.63*aod700 + 0.116 + dp*np.log(p/p0)", "<NAME>, \"A New airmass independent formulation for the Linke turbidity coefficient\", Solar Energy,", "os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity'] latitude_index = ( np.around(_linearly_scale(latitude,", "numeric The precipitable water of the atmosphere (cm). Algorithm derived for values between", "function i0p = _calc_i0p(dni_extra, w, aod700, p) taub = _calc_taub(w, aod700, p) b", "References ---------- [1] <NAME> and <NAME>, \"A New airmass independent formulation for the", "level in meters. 
dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine the", "as if there are two separate corrections, the # first correction is used", "lookup function \"\"\" inputrange = inputmax - inputmin outputrange = outputmax - outputmin", "taud = taud[0] return taud def _calc_d(w, aod700, p): \"\"\"Calculate the d coefficient.\"\"\"", "Information\", Proc. ISES Solar World Congress, June 2003. Goteborg, Sweden. ''' # Dan's", "models found the Ineichen/Perez model to have excellent performance with a minimal input", "def _calc_taug(w, aod700, p): \"\"\"Calculate the taug coefficient\"\"\" p0 = 101325. tg1 =", "the clear sky GHI, DNI, and DHI according to the simplified Solis model", "clear sky GHI, DNI, and DHI from Ineichen/Perez model. Implements the Ineichen and", "derived for values between 0.2 and 10 cm. Values less than 0.2 will", "file by 20 to get the # turbidity. try: import scipy.io except ImportError:", "2) - 0.25 .* (2-TLcorr(TL < 2)) .^ (0.5); # This equation is", "return b def _calc_taug(w, aod700, p): \"\"\"Calculate the taug coefficient\"\"\" p0 = 101325.", "and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. ''' cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi =", "Horizontal Irradiance Clear Sky Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012.", "np.full_like(aod700, w) elif np.isscalar(aod700): aod700 = np.full_like(w, aod700) aod700_mask = aod700 < 0.05", "pandas.DatetimeIndex latitude : float longitude : float filepath : string The path to", "4320 x 12 # matrix of type uint8 called 'LinkeTurbidity'. The rows represent", "sky GHI, DNI, and DHI. \"\"\" from __future__ import division import os from", "as presented in [1, 2]. 
A report on clear sky models found the", "the Haurwitz model to have the best performance of models which require only", "def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\" Calculate the clear sky GHI, DNI,", "758-762 (2008). .. [2] <NAME>, \"Validation of models that estimate the clear sky", "b * np.exp(-0.09 * airmass_absolute * (tl - 1)) bnci = dni_extra *", "[3]. Default values for monthly Linke turbidity provided by SoDa [4, 5]. Parameters", "'ghi'``. References ---------- .. [1] <NAME>, \"A broadband simplified version of the Solis", "will result in 0s instead of # negatives. propagates nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith),", "<NAME>, \"Global Horizontal Irradiance Clear Sky Models: Implementation and Analysis\", Sandia National Laboratories,", "import scipy.io except ImportError: raise ImportError('The Linke turbidity lookup table requires scipy. '", "82, 758-762 (2008). .. [2] <NAME>, \"Validation of models that estimate the clear", "subtle. The AM input is likely to # have NaNs that we'll want", "+ tds[0] + tds[5]*np.log(p/p0)) # be polite about matching the output type to", "the # following: LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). # Note that the numbers", "in eqn. 8, 2) new turbidity # factor in eqn 9 and appendix", "DNI*cos(zenith) as presented in [1, 2]. A report on clear sky models found", "w else: w = w.copy() w[w < 0.2] = 0.2 # this algorithm", "global horizontal irradiance (GHI), direct normal irradiance (DNI), and calculates the clear-sky diffuse", "w, aod700, p): \"\"\"Calculate the \"enhanced extraterrestrial irradiance\".\"\"\" p0 = 101325. 
io0 =", "# be polite about matching the output type to the input type(s) if", "existence of known typos in # the pg 156 publication (notably the fh2-(TL-1)", "# The .mat file 'LinkeTurbidities.mat' contains a single 2160 x 4320 x 12", "180; and the depth (third dimension) represents months of # the year from", "the Haurwitz clear sky model for global horizontal irradiance (GHI) as presented in", "Cloudiness and Cloud Density,\" Journal of Meteorology, vol. 2, pp. 154-166, 1945. [2]", "coefficient\"\"\" p0 = 101325. tg1 = 1.24 + 0.047*np.log(w) + 0.0061*np.log(w)**2 tg0 =", "307-317, 2002. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear Sky Models:", "precipitable_water: numeric The precipitable water of the atmosphere (cm). Algorithm derived for values", "= 101325. tb1 = 1.82 + 0.056*np.log(w) + 0.0071*np.log(w)**2 tb0 = 0.33 +", "and Perez clear sky model for global horizontal irradiance (GHI), direct normal irradiance", "of year value. # This is approximate and could be made more accurate.", "the # second correction is used to correct the revised turibidity # factor.", "dni = np.minimum(bnci, bnci_2) dhi = ghi - dni*cos_zenith irrads = OrderedDict() irrads['ghi']", "values will result in 0s instead of # negatives. propagates nans. cos_zenith =", "horizontal irradiance (GHI) as presented in [1, 2]. A report on clear sky", "---------- [1] <NAME>, \"Insolation in Relation to Cloudiness and Cloud Density,\" Journal of", "sea level in meters. dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine", "[2]_ provides comparisons with other clear sky models. Parameters ---------- apparent_elevation: numeric The", "td4, tdp]) * aod700_mask).sum(axis=1) p0 = 101325. taud = (tds[4]*aod700**4 + tds[3]*aod700**3 +", "/= 20. 
return linke_turbidity def haurwitz(apparent_zenith): ''' Determine clear sky GHI from Haurwitz", "tbp*np.log(p/p0) return taub def _calc_b(w, aod700): \"\"\"Calculate the b coefficient.\"\"\" b1 = 0.00925*aod700**2", "and # the log(w) instead of repeating the calculations as needed in each", "p0 = 101325. tg1 = 1.24 + 0.047*np.log(w) + 0.0061*np.log(w)**2 tg0 = 0.27", "Reference [1]_ describes the accuracy of the model as being 15, 20, and", "aod700: numeric The aerosol optical depth at 700 nm (unitless). Algorithm derived for", "+ 0.045*np.log(w) + 0.0096*np.log(w)**2 tbp = 0.0089*w + 0.13 taub = tb1*aod700 +", "model in eqn. 8, 2) new turbidity # factor in eqn 9 and", "in either the beam or GHI models. The phrasing of # appendix A", "The rows represent global # latitudes from 90 to -90 degrees; the columns", "level and 7000 m, or 101325 and 41000 Pascals. dni_extra: numeric Extraterrestrial irradiance.", "be assumed to be equal to 0.2. pressure: numeric The atmospheric pressure (Pascals).", "+ 0.0148*aod700 - 0.0172 b0 = -0.7565*aod700**2 + 0.5057*aod700 + 0.4557 b =", "global and beam solar irradiance,\" Solar Energy, 132, 332-344 (2016). \"\"\" p =", "numeric Altitude above sea level in meters. dni_extra: numeric Extraterrestrial irradiance. The units", "minimal input data set [3]. Default values for monthly Linke turbidity provided by", "b1 = 0.00925*aod700**2 + 0.0148*aod700 - 0.0172 b0 = -0.7565*aod700**2 + 0.5057*aod700 +", "altitudes between sea level and 7000 m, or 101325 and 41000 Pascals. dni_extra:", "2.94 tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds = (np.array([td0, td1, td2, td3, td4, tdp])", "This means that we need to add previous Dec and next Jan #", "Solar Energy, vol 73, pp. 151-157, 2002. [2] <NAME> et. 
al., \"A New", "np.isscalar(aod700): w = np.array([w]) aod700 = np.array([aod700]) elif np.isscalar(w): w = np.full_like(aod700, w)", "td4 = 86*w - 13800, -0.21*w + 11.6 td3 = -3.11*w + 79.4,", "= 0.0089*w + 0.13 taub = tb1*aod700 + tb0 + tbp*np.log(p/p0) return taub", "describes the accuracy of the model as being 15, 20, and 18 W/m^2", "+ 0.868 cg2 = 3.92e-05 * altitude + 0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1 +", "turbidity values found in ``LinkeTurbidities.mat`` to daily values. Returns ------- turbidity : Series", "ensure that the arrays will have the # right shape in the tds", "pd from pvlib import tools def ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.): ''' Determine", "no need to correct the # turbidity factor used in the beam/GHI models.", "to the simplified Solis model [1]_. Reference [1]_ describes the accuracy of the", "i01 = 0.97 * w**0.032 i02 = 0.12 * w**0.56 i0p = i0", "700 nm (unitless). Algorithm derived for values between 0 and 0.45. precipitable_water: numeric", "2012. ''' cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi = 1098.0 * cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi", "above sea level in meters. dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra``", "1.08 * w**0.0051 i01 = 0.97 * w**0.032 i02 = 0.12 * w**0.56", "and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17, 2012).", "in the tds calculation. 
# there's probably a better way to do this.", "3.92e-05 * altitude + 0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl - 1))) *", "Implements the Haurwitz clear sky model for global horizontal irradiance (GHI) as presented", "0.2846*aod700 + 0.3798 return g def _calc_taud(w, aod700, p): \"\"\"Calculate the taud coefficient.\"\"\"", "the clear sky global and beam solar irradiance,\" Solar Energy, 132, 332-344 (2016).", "_linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax): \"\"\" used by linke turbidity lookup function \"\"\"", "fh2*(tl - 1))) * np.exp(0.01*airmass_absolute**1.8)) # use fmax to map airmass nans to", "-90, 1, 2160)) .astype(np.int64)) longitude_index = ( np.around(_linearly_scale(longitude, -180, 180, 1, 4320)) .astype(np.int64))", "ImportError('The Linke turbidity lookup table requires scipy. ' + 'You can still use", "tools.cosd(apparent_zenith) clearsky_ghi = 1098.0 * cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0] = 0", "eqn. 11. They do NOT appear to use the new turbidity factor (item", "Cloud Density,\" Journal of Meteorology, vol. 2, pp. 154-166, 1945. [2] <NAME>, \"Insolation", "Meteorology, vol. 3, pp. 123-124, 1946. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal", "Extreme care should be taken in the interpretation of this result! Parameters ----------", "= _calc_taud(w, aod700, p) d = _calc_d(w, aod700, p) # this prevents the", "between GHI and DNI*cos(zenith) as presented in [1, 2]. A report on clear", "to the ``.mat`` file. interp_turbidity : bool If ``True``, interpolates the monthly Linke", "+ 0.13 taub = tb1*aod700 + tb0 + tbp*np.log(p/p0) return taub def _calc_b(w,", "dni_extra=1364.): \"\"\" Calculate the clear sky GHI, DNI, and DHI according to the", "to December (12). 
To determine the Linke # turbidity for a position on", "if filepath is None: pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat", "0) # BncI = \"normal beam clear sky radiation\" b = 0.664 +", "np.full_like(w, aod700) aod700_mask = aod700 < 0.05 aod700_mask = np.array([aod700_mask, ~aod700_mask], dtype=np.int) #", "---------- apparent_zenith : Series The apparent (refraction corrected) sun zenith angle in degrees.", "pd.DataFrame({'ghi': clearsky_ghi}) return df_out def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax): \"\"\" used by", "appendix A, and 3) Global horizontal model in # eqn. 11. They do", "[4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17, 2012). [5] <NAME>, et. al., \"Worldwide Linke Turbidity", "= b * np.exp(-0.09 * airmass_absolute * (tl - 1)) bnci = dni_extra", "# Create the corrected TL for TL < 2 # TLcorr = TL;", "values found in ``LinkeTurbidities.mat`` to daily values. Returns ------- turbidity : Series \"\"\"", "__future__ import division import os from collections import OrderedDict import numpy as np", "each month. # This means that we need to add previous Dec and", "inputrange = inputmax - inputmin outputrange = outputmax - outputmin outputmatrix = (inputmatrix-inputmin)", "numeric The apparent elevation of the sun above the horizon (deg). aod700: numeric", "= (tds[4]*aod700**4 + tds[3]*aod700**3 + tds[2]*aod700**2 + tds[1]*aod700 + tds[0] + tds[5]*np.log(p/p0)) #", "p) d = _calc_d(w, aod700, p) # this prevents the creation of nans", "GHI and DNI*cos(zenith) as presented in [1, 2]. 
A report on clear sky", "= scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity'] latitude_index = ( np.around(_linearly_scale(latitude, 90, -90, 1, 2160))", "Series input) or OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``.", "mat['LinkeTurbidity'] latitude_index = ( np.around(_linearly_scale(latitude, 90, -90, 1, 2160)) .astype(np.int64)) longitude_index = (", "propagate through to the output. This # is accomplished by judicious use and", "bnci_2 = ((1 - (0.1 - 0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) / cos_zenith) bnci_2 =", "taub = tb1*aod700 + tb0 + tbp*np.log(p/p0) return taub def _calc_b(w, aod700): \"\"\"Calculate", "are 20 * Linke Turbidity, # so divide the number from the file", "so divide the number from the file by 20 to get the #", "= 1.08 * w**0.0051 i01 = 0.97 * w**0.032 i02 = 0.12 *", "performance with a minimal input data set [3]. Default values for monthly Linke", "from January (1) to December (12). To determine the Linke # turbidity for", "daily values. Returns ------- turbidity : Series \"\"\" # The .mat file 'LinkeTurbidities.mat'", "is reasonably fast already, but it could be made # faster by precalculating", "g[x[0]-1], axis=1) linke_turbidity /= 20. return linke_turbidity def haurwitz(apparent_zenith): ''' Determine clear sky", "+ tb0 + tbp*np.log(p/p0) return taub def _calc_b(w, aod700): \"\"\"Calculate the b coefficient.\"\"\"", "to the array so that the interpolation will work for # Jan 1", "307-317 (2002). It is slightly different # than the equation given in Solar", "+ 0.071*np.log(p/p0)) return i0p def _calc_taub(w, aod700, p): \"\"\"Calculate the taub coefficient\"\"\" p0", "np.isscalar(aod700): aod700 = np.full_like(w, aod700) aod700_mask = aod700 < 0.05 aod700_mask = np.array([aod700_mask,", "represent global longitudes # from -180 to 180; and the depth (third dimension)", "clear sky global and beam solar irradiance,\" Solar Energy, 132, 332-344 (2016). 
\"\"\"", "(2-TLcorr(TL < 2)) .^ (0.5); # This equation is found in Solar Energy", "interp_turbidity: # Data covers 1 year. # Assume that data corresponds to the", "+ 0.163/fh1 bnci = b * np.exp(-0.09 * airmass_absolute * (tl - 1))", "= taud[0] return taud def _calc_d(w, aod700, p): \"\"\"Calculate the d coefficient.\"\"\" p0", "for Satellite-Derived Irradiances: Description and Validation\", Solar Energy, vol 73, pp. 307-317, 2002.", "_calc_i0p(dni_extra, w, aod700, p) taub = _calc_taub(w, aod700, p) b = _calc_b(w, aod700)", "the taug coefficient\"\"\" p0 = 101325. tg1 = 1.24 + 0.047*np.log(w) + 0.0061*np.log(w)**2", "pages 151-157, Ineichen and Perez introduce (among other # things) three things. 1)", "OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. References ---------- ..", "as pd from pvlib import tools def ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.): '''", "on clear sky models found the Ineichen/Perez model to have excellent performance with", "cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi = 1098.0 * cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0]", "9 and appendix A, and 3) Global horizontal model in # eqn. 11.", "[1]_ describes the accuracy of the model as being 15, 20, and 18", "found in Solar Energy 73, pg 311. Full ref: Perez # et. al.,", "columns/keys ``'dhi', 'dni', 'ghi'``. See also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References ---------- [1] <NAME>", "0.0090*np.log(w)**2 tgp = 0.0079*w + 0.1 taug = tg1*aod700 + tg0 + tgp*np.log(p/p0)", "outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange + outputmin return outputmatrix def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1.,", "0.2. pressure: numeric The atmospheric pressure (Pascals). Algorithm derived for altitudes between sea", "Linke turbidity provided by SoDa [4, 5]. 
Parameters ----------- apparent_zenith: numeric Refraction corrected", "Journal of Meteorology, vol. 3, pp. 123-124, 1946. [3] <NAME>, <NAME>, and <NAME>,", "in [1, 2]. A report on clear sky models found the Haurwitz model", "SE 73, 157 & SE 73, 312. bnci_2 = ((1 - (0.1 -", "= 1.82 + 0.056*np.log(w) + 0.0071*np.log(w)**2 tb0 = 0.33 + 0.045*np.log(w) + 0.0096*np.log(w)**2", "\"Worldwide Linke Turbidity Information\", Proc. ISES Solar World Congress, June 2003. Goteborg, Sweden.", "known typos in # the pg 156 publication (notably the fh2-(TL-1) should be", "aod700) taug = _calc_taug(w, aod700, p) g = _calc_g(w, aod700) taud = _calc_taud(w,", "Algorithm derived for values between 0.2 and 10 cm. Values less than 0.2", "airmass_absolute: numeric Pressure corrected airmass. linke_turbidity: numeric Linke Turbidity. altitude: numeric Altitude above", "airmass. linke_turbidity: numeric Linke Turbidity. altitude: numeric Altitude above sea level in meters.", "Type,\" Journal of Meteorology, vol. 3, pp. 123-124, 1946. [3] <NAME>, <NAME>, and", "accuracy of the model as being 15, 20, and 18 W/m^2 for the", "of # appendix A seems as if there are two separate corrections, the", "of # negatives. propagates nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0) tl = linke_turbidity fh1", "input data set [3]. Default values for monthly Linke turbidity provided by SoDa", "73, 157 & SE 73, 312. bnci_2 = ((1 - (0.1 - 0.2*np.exp(-tl))/(0.1", "on the Earth's surface for a given month do the # following: LT", "the Haurwitz clear-sky model. Initial implementation of this algorithm by <NAME>. References ----------", "aod700, p): \"\"\"Calculate the taug coefficient\"\"\" p0 = 101325. 
tg1 = 1.24 +", "0.043*np.log(w) + 0.0090*np.log(w)**2 tgp = 0.0079*w + 0.1 taug = tg1*aod700 + tg0", "\"\"\" # The .mat file 'LinkeTurbidities.mat' contains a single 2160 x 4320 x", "90 to -90 degrees; the columns represent global longitudes # from -180 to", "(tds[4]*aod700**4 + tds[3]*aod700**3 + tds[2]*aod700**2 + tds[1]*aod700 + tds[0] + tds[5]*np.log(p/p0)) # be", "between 0 and 0.45. precipitable_water: numeric The precipitable water of the atmosphere (cm).", "raise ImportError('The Linke turbidity lookup table requires scipy. ' + 'You can still", "/ cos_zenith) bnci_2 = ghi * np.fmin(np.fmax(bnci_2, 0), 1e20) dni = np.minimum(bnci, bnci_2)", "[2] <NAME> et. al., \"A New Operational Model for Satellite-Derived Irradiances: Description and", "slightly different # than the equation given in Solar Energy 73, pg 156.", "0.27 + 0.043*np.log(w) + 0.0090*np.log(w)**2 tgp = 0.0079*w + 0.1 taug = tg1*aod700", "Goteborg, Sweden. ''' # Dan's note on the TL correction: By my reading", "p): \"\"\"Calculate the \"enhanced extraterrestrial irradiance\".\"\"\" p0 = 101325. io0 = 1.08 *", "= np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni = i0p * np.exp(-taub/sin_elev**b) ghi = i0p * np.exp(-taug/sin_elev**g)", "def ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.): ''' Determine clear sky GHI, DNI, and", "0.00925*aod700**2 + 0.0148*aod700 - 0.0172 b0 = -0.7565*aod700**2 + 0.5057*aod700 + 0.4557 b", "np.maximum, # np.minimum, and np.fmax # use max so that nighttime values will", "0.0061*np.log(w)**2 tg0 = 0.27 + 0.043*np.log(w) + 0.0090*np.log(w)**2 tgp = 0.0079*w + 0.1", "def _calc_taub(w, aod700, p): \"\"\"Calculate the taub coefficient\"\"\" p0 = 101325. tb1 =", "aod700 >= 0.05 td4 = 86*w - 13800, -0.21*w + 11.6 td3 =", "SE 73, 312. bnci_2 = ((1 - (0.1 - 0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) /", "right shape in the tds calculation. 
# there's probably a better way to", "= ghi irrads['dni'] = dni irrads['dhi'] = dhi if isinstance(dni, pd.Series): irrads =", "latitudes from 90 to -90 degrees; the columns represent global longitudes # from", "= ((1 - (0.1 - 0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) / cos_zenith) bnci_2 = ghi", "be fh2 * # (TL-1)). # The NaN handling is a little subtle.", "= b1 * np.log(w) + b0 return b def _calc_taug(w, aod700, p): \"\"\"Calculate", "2 above) in either the beam or GHI models. The phrasing of #", "73, pg 156. We used the # equation from pg 311 because of", "(refraction corrected) sun zenith angle in degrees. Returns ------- pd.Series The modeled global", "p): \"\"\"Calculate the taud coefficient.\"\"\" # isscalar tests needed to ensure that the", "* (i02*aod700**2 + i01*aod700 + io0 + 0.071*np.log(p/p0)) return i0p def _calc_taub(w, aod700,", "(1) to December (12). To determine the Linke # turbidity for a position", "negatives. propagates nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0) tl = linke_turbidity fh1 = np.exp(-altitude/8000.)", "accurate. g2 = np.concatenate([[g[-1]], g, [g[0]]]) days = np.linspace(-15, 380, num=14) linke_turbidity =", "\"A broadband simplified version of the Solis clear sky model,\" Solar Energy, 82,", "clear sky GHI from Haurwitz model. Implements the Haurwitz clear sky model for", "the interpolation will work for # Jan 1 - Jan 15 and Dec", "used in the beam/GHI models. # Create the corrected TL for TL <", "the best performance of models which require only zenith angle [3]. Extreme care", "Clear Sky Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. ''' cos_zenith", "the middle of each month. # This means that we need to add", "np.maximum(tools.cosd(apparent_zenith), 0) tl = linke_turbidity fh1 = np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.) cg1 =", "101325. dp = 1/(18 + 152*aod700) d = -0.337*aod700**2 + 0.63*aod700 + 0.116", "of each month. 
# This means that we need to add previous Dec", "18 W/m^2 for the beam, global, and diffuse components. Reference [2]_ provides comparisons", "+ io0 + 0.071*np.log(p/p0)) return i0p def _calc_taub(w, aod700, p): \"\"\"Calculate the taub", "The atmospheric pressure (Pascals). Algorithm derived for altitudes between sea level and 7000", "_calc_g(w, aod700) taud = _calc_taud(w, aod700, p) d = _calc_d(w, aod700, p) #", "aod700, p): \"\"\"Calculate the taub coefficient\"\"\" p0 = 101325. tb1 = 1.82 +", "we map the month value to the day of year value. # This", "The apparent elevation of the sun above the horizon (deg). aod700: numeric The", "101325 and 41000 Pascals. dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine", "Validation\", Solar Energy, vol 73, pp. 307-317, 2002. [3] <NAME>, <NAME>, and <NAME>,", "dhi = i0p * np.exp(-taud/sin_elev**d) irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] =", "+ tds[2]*aod700**2 + tds[1]*aod700 + tds[0] + tds[5]*np.log(p/p0)) # be polite about matching", "(notably the fh2-(TL-1) should be fh2 * # (TL-1)). # The NaN handling", "0.13 taub = tb1*aod700 + tb0 + tbp*np.log(p/p0) return taub def _calc_b(w, aod700):", "LongitudeIndex, month). # Note that the numbers within the matrix are 20 *", "to use the new turbidity factor (item # 2 above) in either the", "= w.copy() w[w < 0.2] = 0.2 # this algorithm is reasonably fast", "Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17,", "this algorithm by <NAME>. References ---------- [1] <NAME>, \"Insolation in Relation to Cloudiness", "beam solar irradiance,\" Solar Energy, 132, 332-344 (2016). 
\"\"\" p = pressure w", "np.minimum, and np.fmax # use max so that nighttime values will result in", "new turbidity factor (item # 2 above) in either the beam or GHI", "is no need to correct the # turbidity factor used in the beam/GHI", "380, num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2), index=time) else: linke_turbidity = pd.DataFrame(time.month, index=time)", "formulation for the Linke turbidity coefficient\", Solar Energy, vol 73, pp. 151-157, 2002.", "- 1)) bnci = dni_extra * np.fmax(bnci, 0) # \"empirical correction\" SE 73,", "(among other # things) three things. 1) Beam model in eqn. 8, 2)", "-180 to 180; and the depth (third dimension) represents months of # the", "0.0096*np.log(w)**2 tbp = 0.0089*w + 0.13 taub = tb1*aod700 + tb0 + tbp*np.log(p/p0)", "and np.isscalar(aod700): w = np.array([w]) aod700 = np.array([aod700]) elif np.isscalar(w): w = np.full_like(aod700,", "given in Solar Energy 73, pg 156. We used the # equation from", "[1, 2]. A report on clear sky models found the Ineichen/Perez model to", "aod700): \"\"\"Calculate the g coefficient.\"\"\" g = -0.0147*np.log(w) - 0.3079*aod700**2 + 0.2846*aod700 +", "irrads['dhi'] = dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def _calc_i0p(i0,", "/ tl * np.fmax(ghi, 0) # BncI = \"normal beam clear sky radiation\"", "= -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds = (np.array([td0, td1, td2, td3, td4, tdp]) * aod700_mask).sum(axis=1)", "return irrads def _calc_i0p(i0, w, aod700, p): \"\"\"Calculate the \"enhanced extraterrestrial irradiance\".\"\"\" p0", "<NAME>, \"Insolation in Relation to Cloudiness and Cloud Density,\" Journal of Meteorology, vol.", "13800, -0.21*w + 11.6 td3 = -3.11*w + 79.4, 0.27*w - 20.7 td2", "with a minimal input data set [3]. 
Default values for monthly Linke turbidity", "- 0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) / cos_zenith) bnci_2 = ghi * np.fmin(np.fmax(bnci_2, 0), 1e20)", "5.09e-05 * altitude + 0.868 cg2 = 3.92e-05 * altitude + 0.0387 ghi", "# right shape in the tds calculation. # there's probably a better way", "or GHI models. The phrasing of # appendix A seems as if there", "0) # \"empirical correction\" SE 73, 157 & SE 73, 312. bnci_2 =", "data file supplied with pvlib. Parameters ---------- time : pandas.DatetimeIndex latitude : float", "len(taud) == 1: taud = taud[0] return taud def _calc_d(w, aod700, p): \"\"\"Calculate", "# factor in eqn 9 and appendix A, and 3) Global horizontal model", "New airmass independent formulation for the Linke turbidity coefficient\", Solar Energy, vol 73,", "0.25 .* (2-TLcorr(TL < 2)) .^ (0.5); # This equation is found in", ": bool If ``True``, interpolates the monthly Linke turbidity values found in ``LinkeTurbidities.mat``", ".. [2] <NAME>, \"Validation of models that estimate the clear sky global and", "pd.Series(np.interp(time.dayofyear, days, g2), index=time) else: linke_turbidity = pd.DataFrame(time.month, index=time) # apply monthly data", "the ``.mat`` file. interp_turbidity : bool If ``True``, interpolates the monthly Linke turbidity", "[4, 5]. Parameters ----------- apparent_zenith: numeric Refraction corrected solar zenith angle in degrees.", "the equation given in Solar Energy 73, pg 156. We used the #", "collections import OrderedDict import numpy as np import pandas as pd from pvlib", "elif np.isscalar(aod700): aod700 = np.full_like(w, aod700) aod700_mask = aod700 < 0.05 aod700_mask =", "probably a better way to do this. if np.isscalar(w) and np.isscalar(aod700): w =", "``LinkeTurbidities.mat`` data file supplied with pvlib. Parameters ---------- time : pandas.DatetimeIndex latitude :", "# use fmax to map airmass nans to 0s. multiply and divide by", "taub coefficient\"\"\" p0 = 101325. 
tb1 = 1.82 + 0.056*np.log(w) + 0.0071*np.log(w)**2 tb0", "-0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds = (np.array([td0, td1, td2, td3, td4, tdp]) * aod700_mask).sum(axis=1) p0", "1 - Jan 15 and Dec 16 - Dec 31. # Then we", "month do the # following: LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). # Note that", "irradiance (GHI), direct normal irradiance (DNI), and calculates the clear-sky diffuse horizontal (DHI)", "sky GHI from Haurwitz model. Implements the Haurwitz clear sky model for global", "arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. References ---------- .. [1] <NAME>,", "map the month value to the day of year value. # This is", "_calc_d(w, aod700, p): \"\"\"Calculate the d coefficient.\"\"\" p0 = 101325. dp = 1/(18", "to the output. This # is accomplished by judicious use and placement of", "want to map to 0s in the output. However, we # want NaNs", "# negatives. propagates nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0) tl = linke_turbidity fh1 =", "Dec and next Jan # to the array so that the interpolation will", "Returns ------- clearsky : DataFrame (if Series input) or OrderedDict of arrays DataFrame/OrderedDict", "0.0089*w + 0.13 taub = tb1*aod700 + tb0 + tbp*np.log(p/p0) return taub def", "and DHI from Ineichen/Perez model. Implements the Ineichen and Perez clear sky model", "i02 = 0.12 * w**0.56 i0p = i0 * (i02*aod700**2 + i01*aod700 +", "than the equation given in Solar Energy 73, pg 156. We used the", "- Dec 31. 
# Then we map the month value to the day", "in each # function i0p = _calc_i0p(dni_extra, w, aod700, p) taub = _calc_taub(w,", "altitude + 0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl - 1))) * np.exp(0.01*airmass_absolute**1.8)) #", "= tb1*aod700 + tb0 + tbp*np.log(p/p0) return taub def _calc_b(w, aod700): \"\"\"Calculate the", "+ 'supply your own turbidities.') if filepath is None: pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath", "* sin_elev dhi = i0p * np.exp(-taud/sin_elev**d) irrads = OrderedDict() irrads['ghi'] = ghi", "< 2) - 0.25 .* (2-TLcorr(TL < 2)) .^ (0.5); # This equation", "# eqn. 11. They do NOT appear to use the new turbidity factor", "The modeled global horizonal irradiance in W/m^2 provided by the Haurwitz clear-sky model.", "of the publication # on pages 151-157, Ineichen and Perez introduce (among other", "the file by 20 to get the # turbidity. try: import scipy.io except", "Irradiance Clear Sky Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. [4]", "+ 'You can still use clearsky.ineichen if you ' + 'supply your own", "the depth (third dimension) represents months of # the year from January (1)", "made # faster by precalculating the powers of aod700, the log(p/p0), and #", "Series \"\"\" # The .mat file 'LinkeTurbidities.mat' contains a single 2160 x 4320", "irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi'] = dhi if", "i0p * np.exp(-taub/sin_elev**b) ghi = i0p * np.exp(-taug/sin_elev**g) * sin_elev dhi = i0p", "Calculate the clear sky GHI, DNI, and DHI according to the simplified Solis", "derived for values between 0 and 0.45. precipitable_water: numeric The precipitable water of", "your own turbidities.') if filepath is None: pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path,", "(Pascals). 
Algorithm derived for altitudes between sea level and 7000 m, or 101325", "longitude, filepath=None, interp_turbidity=True): \"\"\" Look up the Linke Turibidity from the ``LinkeTurbidities.mat`` data", "between sea level and 7000 m, or 101325 and 41000 Pascals. dni_extra: numeric", "Relation to Cloudiness and Cloud Density,\" Journal of Meteorology, vol. 2, pp. 154-166,", "axis=1) linke_turbidity /= 20. return linke_turbidity def haurwitz(apparent_zenith): ''' Determine clear sky GHI", "model. Implements the Haurwitz clear sky model for global horizontal irradiance (GHI) as", "month). # Note that the numbers within the matrix are 20 * Linke", "w = np.full_like(aod700, w) elif np.isscalar(aod700): aod700 = np.full_like(w, aod700) aod700_mask = aod700", "+ 0.3798 return g def _calc_taud(w, aod700, p): \"\"\"Calculate the taud coefficient.\"\"\" #", "# the log(w) instead of repeating the calculations as needed in each #", "\"Global Horizontal Irradiance Clear Sky Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389,", "of coefficients for # aod700 < 0.05, aod700 >= 0.05 td4 = 86*w", "friendly to scalar and series inputs sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni = i0p", "1.82 + 0.056*np.log(w) + 0.0071*np.log(w)**2 tb0 = 0.33 + 0.045*np.log(w) + 0.0096*np.log(w)**2 tbp", "8.86, 0.0554*w - 5.71 td0 = 0.0042*w + 3.12, 0.0057*w + 2.94 tdp", "supplied with pvlib. Parameters ---------- time : pandas.DatetimeIndex latitude : float longitude :", "'You can still use clearsky.ineichen if you ' + 'supply your own turbidities.')", "101325. 
tb1 = 1.82 + 0.056*np.log(w) + 0.0071*np.log(w)**2 tb0 = 0.33 + 0.045*np.log(w)", "BncI = \"normal beam clear sky radiation\" b = 0.664 + 0.163/fh1 bnci", "tb1*aod700 + tb0 + tbp*np.log(p/p0) return taub def _calc_b(w, aod700): \"\"\"Calculate the b", "20.7 td2 = -0.23*w + 74.8, -0.134*w + 15.5 td1 = 0.092*w -", "\"A New airmass independent formulation for the Linke turbidity coefficient\", Solar Energy, vol", "arrays will have the # right shape in the tds calculation. # there's", "- 1))) * np.exp(0.01*airmass_absolute**1.8)) # use fmax to map airmass nans to 0s.", "0.868 cg2 = 3.92e-05 * altitude + 0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl", "= 1.24 + 0.047*np.log(w) + 0.0061*np.log(w)**2 tg0 = 0.27 + 0.043*np.log(w) + 0.0090*np.log(w)**2", "''' Determine clear sky GHI from Haurwitz model. Implements the Haurwitz clear sky", "1946. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear Sky Models: Implementation", "aod700, p): \"\"\"Calculate the \"enhanced extraterrestrial irradiance\".\"\"\" p0 = 101325. io0 = 1.08", "w = np.array([w]) aod700 = np.array([aod700]) elif np.isscalar(w): w = np.full_like(aod700, w) elif", "+ tbp*np.log(p/p0) return taub def _calc_b(w, aod700): \"\"\"Calculate the b coefficient.\"\"\" b1 =", "np.fmin(np.fmax(bnci_2, 0), 1e20) dni = np.minimum(bnci, bnci_2) dhi = ghi - dni*cos_zenith irrads", "models which require only zenith angle [3]. Extreme care should be taken in", "# there's probably a better way to do this. if np.isscalar(w) and np.isscalar(aod700):", "= tg1*aod700 + tg0 + tgp*np.log(p/p0) return taug def _calc_g(w, aod700): \"\"\"Calculate the", "Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17, 2012). [5]", "NaNs in other inputs to propagate through to the output. 
This # is", "and placement of np.maximum, # np.minimum, and np.fmax # use max so that", "= dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def _calc_i0p(i0, w,", "# factor. In my estimation, there is no need to correct the #", "# 2 above) in either the beam or GHI models. The phrasing of", "the b coefficient.\"\"\" b1 = 0.00925*aod700**2 + 0.0148*aod700 - 0.0172 b0 = -0.7565*aod700**2", "interpretation of this result! Parameters ---------- apparent_zenith : Series The apparent (refraction corrected)", "+ 0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl - 1))) * np.exp(0.01*airmass_absolute**1.8)) # use", "fails for pw < 0.2 if np.isscalar(w): w = 0.2 if w <", "float longitude : float filepath : string The path to the ``.mat`` file.", "turbidity lookup table requires scipy. ' + 'You can still use clearsky.ineichen if", "be made # faster by precalculating the powers of aod700, the log(p/p0), and", "normal irradiance (DNI), and calculates the clear-sky diffuse horizontal (DHI) component as the", "that nighttime values will result in 0s instead of # negatives. propagates nans.", "and 0.45. precipitable_water: numeric The precipitable water of the atmosphere (cm). Algorithm derived", "aod700 < 0.05 aod700_mask = np.array([aod700_mask, ~aod700_mask], dtype=np.int) # create tuples of coefficients", "b def _calc_taug(w, aod700, p): \"\"\"Calculate the taug coefficient\"\"\" p0 = 101325. tg1", "Solar Energy, 132, 332-344 (2016). \"\"\" p = pressure w = precipitable_water #", "w < 0.2 else w else: w = w.copy() w[w < 0.2] =", "np.isscalar(w): w = 0.2 if w < 0.2 else w else: w =", "import OrderedDict import numpy as np import pandas as pd from pvlib import", "1/(18 + 152*aod700) d = -0.337*aod700**2 + 0.63*aod700 + 0.116 + dp*np.log(p/p0) return", "i01*aod700 + io0 + 0.071*np.log(p/p0)) return i0p def _calc_taub(w, aod700, p): \"\"\"Calculate the", "turbidity coefficient\", Solar Energy, vol 73, pp. 151-157, 2002. 
[2] <NAME> et. al.,", "other clear sky models. Parameters ---------- apparent_elevation: numeric The apparent elevation of the", "correction\" SE 73, 157 & SE 73, 312. bnci_2 = ((1 - (0.1", "<gh_stars>0 \"\"\" The ``clearsky`` module contains several methods to calculate clear sky GHI,", "' + 'supply your own turbidities.') if filepath is None: pvlib_path = os.path.dirname(os.path.abspath(__file__))", "clear sky model,\" Solar Energy, 82, 758-762 (2008). .. [2] <NAME>, \"Validation of", "Sweden. ''' # Dan's note on the TL correction: By my reading of", "sky GHI, DNI, and DHI according to the simplified Solis model [1]_. Reference", "' + 'You can still use clearsky.ineichen if you ' + 'supply your", "clear sky models found the Haurwitz model to have the best performance of", "pvlib.location.Location.get_clearsky References ---------- [1] <NAME> and <NAME>, \"A New airmass independent formulation for", "aod700 = np.full_like(w, aod700) aod700_mask = aod700 < 0.05 aod700_mask = np.array([aod700_mask, ~aod700_mask],", "Look up the Linke Turibidity from the ``LinkeTurbidities.mat`` data file supplied with pvlib.", "from collections import OrderedDict import numpy as np import pandas as pd from", "1, 4320)) .astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: # Data covers 1 year.", "# TLcorr = TL; # TLcorr(TL < 2) = TLcorr(TL < 2) -", "airmass nans to 0s. multiply and divide by tl to # reinsert tl", "(np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl - 1))) * np.exp(0.01*airmass_absolute**1.8)) # use fmax to map airmass", "string The path to the ``.mat`` file. interp_turbidity : bool If ``True``, interpolates", "from the file by 20 to get the # turbidity. try: import scipy.io", "w = precipitable_water # algorithm fails for pw < 0.2 if np.isscalar(w): w", "def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax): \"\"\" used by linke turbidity lookup function", "Linke Turbidity Information\", Proc. 
ISES Solar World Congress, June 2003. Goteborg, Sweden. '''", "the horizon (deg). aod700: numeric The aerosol optical depth at 700 nm (unitless).", "in degrees. airmass_absolute: numeric Pressure corrected airmass. linke_turbidity: numeric Linke Turbidity. altitude: numeric", "Parameters ---------- apparent_elevation: numeric The apparent elevation of the sun above the horizon", "you ' + 'supply your own turbidities.') if filepath is None: pvlib_path =", "following: LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). # Note that the numbers within the", "= precipitable_water # algorithm fails for pw < 0.2 if np.isscalar(w): w =", "calculations as needed in each # function i0p = _calc_i0p(dni_extra, w, aod700, p)", "models that estimate the clear sky global and beam solar irradiance,\" Solar Energy,", "= 0 df_out = pd.DataFrame({'ghi': clearsky_ghi}) return df_out def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin,", "------- turbidity : Series \"\"\" # The .mat file 'LinkeTurbidities.mat' contains a single", "outputmin, outputmax): \"\"\" used by linke turbidity lookup function \"\"\" inputrange = inputmax", "single 2160 x 4320 x 12 # matrix of type uint8 called 'LinkeTurbidity'.", "the Linke Turibidity from the ``LinkeTurbidities.mat`` data file supplied with pvlib. Parameters ----------", "cg1 * dni_extra * cos_zenith * tl / tl * np.fmax(ghi, 0) #", "the Linke # turbidity for a position on the Earth's surface for a", "to -90 degrees; the columns represent global longitudes # from -180 to 180;", "linke_turbidity, altitude=0, dni_extra=1364.): ''' Determine clear sky GHI, DNI, and DHI from Ineichen/Perez", "clearsky_ghi = 1098.0 * cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0] = 0 df_out", "0.045*np.log(w) + 0.0096*np.log(w)**2 tbp = 0.0089*w + 0.13 taub = tb1*aod700 + tb0", "Clear Sky Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. 
[4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5", "want NaNs in other inputs to propagate through to the output. This #", "w, aod700, p) taub = _calc_taub(w, aod700, p) b = _calc_b(w, aod700) taug", "OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. See also --------", "DHI. \"\"\" from __future__ import division import os from collections import OrderedDict import", "\"A New Operational Model for Satellite-Derived Irradiances: Description and Validation\", Solar Energy, vol", "\"\"\" Look up the Linke Turibidity from the ``LinkeTurbidities.mat`` data file supplied with", "of the model as being 15, 20, and 18 W/m^2 for the beam,", "at # the middle of each month. # This means that we need", "it's also friendly to scalar and series inputs sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni", "AM input is likely to # have NaNs that we'll want to map", "irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi'] = dhi if isinstance(dni, pd.Series): irrads", "if you ' + 'supply your own turbidities.') if filepath is None: pvlib_path", "pg 311. Full ref: Perez # et. al., Vol. 73, pp. 307-317 (2002).", "irradiance (DNI), and calculates the clear-sky diffuse horizontal (DHI) component as the difference", "np.array([w]) aod700 = np.array([aod700]) elif np.isscalar(w): w = np.full_like(aod700, w) elif np.isscalar(aod700): aod700", "precalculating the powers of aod700, the log(p/p0), and # the log(w) instead of", "file. interp_turbidity : bool If ``True``, interpolates the monthly Linke turbidity values found", "clear sky model for global horizontal irradiance (GHI), direct normal irradiance (DNI), and", "et. al., \"A New Operational Model for Satellite-Derived Irradiances: Description and Validation\", Solar", "turbidity factor used in the beam/GHI models. 
# Create the corrected TL for", "0.0057*w + 2.94 tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds = (np.array([td0, td1, td2, td3,", "calculate clear sky GHI, DNI, and DHI. \"\"\" from __future__ import division import", "np.exp(-0.09 * airmass_absolute * (tl - 1)) bnci = dni_extra * np.fmax(bnci, 0)", "= pd.DataFrame.from_dict(irrads) return irrads def _calc_i0p(i0, w, aod700, p): \"\"\"Calculate the \"enhanced extraterrestrial", "for the beam, global, and diffuse components. Reference [2]_ provides comparisons with other", "there's probably a better way to do this. if np.isscalar(w) and np.isscalar(aod700): w", "propagates nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0) tl = linke_turbidity fh1 = np.exp(-altitude/8000.) fh2", "do NOT appear to use the new turbidity factor (item # 2 above)", "= _calc_taub(w, aod700, p) b = _calc_b(w, aod700) taug = _calc_taug(w, aod700, p)", "linke_turbidity fh1 = np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.) cg1 = 5.09e-05 * altitude +", "uint8 called 'LinkeTurbidity'. The rows represent global # latitudes from 90 to -90", "presented in [1, 2]. A report on clear sky models found the Ineichen/Perez", "[1] <NAME>, \"A broadband simplified version of the Solis clear sky model,\" Solar", "- outputmin outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange + outputmin return outputmatrix def simplified_solis(apparent_elevation,", "Parameters ---------- apparent_zenith : Series The apparent (refraction corrected) sun zenith angle in", "# this algorithm is reasonably fast already, but it could be made #", "still use clearsky.ineichen if you ' + 'supply your own turbidities.') if filepath", "and Dec 16 - Dec 31. 
# Then we map the month value", "represent global # latitudes from 90 to -90 degrees; the columns represent global", "The phrasing of # appendix A seems as if there are two separate", "sky models found the Ineichen/Perez model to have excellent performance with a minimal", "The apparent (refraction corrected) sun zenith angle in degrees. Returns ------- pd.Series The", "zenith angle in degrees. Returns ------- pd.Series The modeled global horizonal irradiance in", "irrads['dni'] = dni irrads['dhi'] = dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return", "0.3798 return g def _calc_taud(w, aod700, p): \"\"\"Calculate the taud coefficient.\"\"\" # isscalar", "'supply your own turbidities.') if filepath is None: pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath =", "apparent elevation of the sun above the horizon (deg). aod700: numeric The aerosol", "the revised turibidity # factor. In my estimation, there is no need to", "Perez clear sky model for global horizontal irradiance (GHI), direct normal irradiance (DNI),", "pg 311 because of the existence of known typos in # the pg", "of # the year from January (1) to December (12). To determine the", "Determine clear sky GHI, DNI, and DHI from Ineichen/Perez model. Implements the Ineichen", "1e20) dni = np.minimum(bnci, bnci_2) dhi = ghi - dni*cos_zenith irrads = OrderedDict()", "The path to the ``.mat`` file. interp_turbidity : bool If ``True``, interpolates the", "vol. 3, pp. 123-124, 1946. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance", "pd.DataFrame(time.month, index=time) # apply monthly data linke_turbidity = linke_turbidity.apply(lambda x: g[x[0]-1], axis=1) linke_turbidity", "clear sky model for global horizontal irradiance (GHI) as presented in [1, 2].", "table requires scipy. 
' + 'You can still use clearsky.ineichen if you '", "return taud def _calc_d(w, aod700, p): \"\"\"Calculate the d coefficient.\"\"\" p0 = 101325.", "ghi - dni*cos_zenith irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi']", "The aerosol optical depth at 700 nm (unitless). Algorithm derived for values between", "- 20.7 td2 = -0.23*w + 74.8, -0.134*w + 15.5 td1 = 0.092*w", "ghi = cg1 * dni_extra * cos_zenith * tl / tl * np.fmax(ghi,", "monthly Linke turbidity values found in ``LinkeTurbidities.mat`` to daily values. Returns ------- turbidity", "contains the columns/keys ``'dhi', 'dni', 'ghi'``. See also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References ----------", "bnci = b * np.exp(-0.09 * airmass_absolute * (tl - 1)) bnci =", "aod700_mask = aod700 < 0.05 aod700_mask = np.array([aod700_mask, ~aod700_mask], dtype=np.int) # create tuples", "References ---------- .. [1] <NAME>, \"A broadband simplified version of the Solis clear", "so that nighttime values will result in 0s instead of # negatives. propagates", "(GHI) as presented in [1, 2]. A report on clear sky models found", "in the output. However, we # want NaNs in other inputs to propagate", "((1 - (0.1 - 0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) / cos_zenith) bnci_2 = ghi *", "311 because of the existence of known typos in # the pg 156", "numbers within the matrix are 20 * Linke Turbidity, # so divide the", "by <NAME>. References ---------- [1] <NAME>, \"Insolation in Relation to Cloudiness and Cloud", "132, 332-344 (2016). \"\"\" p = pressure w = precipitable_water # algorithm fails", "- 0.25 .* (2-TLcorr(TL < 2)) .^ (0.5); # This equation is found", ".mat file 'LinkeTurbidities.mat' contains a single 2160 x 4320 x 12 # matrix", "that the arrays will have the # right shape in the tds calculation.", "the # turbidity factor used in the beam/GHI models. 
# Create the corrected", "= ( np.around(_linearly_scale(longitude, -180, 180, 1, 4320)) .astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity:", "seems as if there are two separate corrections, the # first correction is", "# to the array so that the interpolation will work for # Jan", "model as being 15, 20, and 18 W/m^2 for the beam, global, and", "correct the revised turibidity # factor. In my estimation, there is no need", "+ 3.12, 0.0057*w + 2.94 tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds = (np.array([td0, td1,", "reinsert tl nans ghi = cg1 * dni_extra * cos_zenith * tl /", "1))) * np.exp(0.01*airmass_absolute**1.8)) # use fmax to map airmass nans to 0s. multiply", "it could be made # faster by precalculating the powers of aod700, the", ".. [1] <NAME>, \"A broadband simplified version of the Solis clear sky model,\"", "clear sky models. Parameters ---------- apparent_elevation: numeric The apparent elevation of the sun", "+ 0.0096*np.log(w)**2 tbp = 0.0089*w + 0.13 taub = tb1*aod700 + tb0 +", "pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def _calc_i0p(i0, w, aod700, p): \"\"\"Calculate the", "numeric The aerosol optical depth at 700 nm (unitless). Algorithm derived for values", "introduce (among other # things) three things. 1) Beam model in eqn. 8,", "20, and 18 W/m^2 for the beam, global, and diffuse components. Reference [2]_", "return taug def _calc_g(w, aod700): \"\"\"Calculate the g coefficient.\"\"\" g = -0.0147*np.log(w) -", "algorithm is reasonably fast already, but it could be made # faster by", "-0.21*w + 11.6 td3 = -3.11*w + 79.4, 0.27*w - 20.7 td2 =", "* tl / tl * np.fmax(ghi, 0) # BncI = \"normal beam clear", "# than the equation given in Solar Energy 73, pg 156. We used", "Haurwitz model. 
Implements the Haurwitz clear sky model for global horizontal irradiance (GHI)", "models found the Haurwitz model to have the best performance of models which", "factor (item # 2 above) in either the beam or GHI models. The", "linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2), index=time) else: linke_turbidity = pd.DataFrame(time.month, index=time) # apply", "73, 312. bnci_2 = ((1 - (0.1 - 0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) / cos_zenith)", "Operational Model for Satellite-Derived Irradiances: Description and Validation\", Solar Energy, vol 73, pp.", "the output. However, we # want NaNs in other inputs to propagate through", "global longitudes # from -180 to 180; and the depth (third dimension) represents", "that we'll want to map to 0s in the output. However, we #", "0.05 aod700_mask = np.array([aod700_mask, ~aod700_mask], dtype=np.int) # create tuples of coefficients for #", "# This means that we need to add previous Dec and next Jan", "of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. References ---------- .. [1]", "aod700, p) b = _calc_b(w, aod700) taug = _calc_taug(w, aod700, p) g =", "from the ``LinkeTurbidities.mat`` data file supplied with pvlib. Parameters ---------- time : pandas.DatetimeIndex", "middle of each month. # This means that we need to add previous", "151-157, Ineichen and Perez introduce (among other # things) three things. 1) Beam", "DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. References ---------- .. [1] <NAME>, \"A", "models, and the # second correction is used to correct the revised turibidity", "< 2) = TLcorr(TL < 2) - 0.25 .* (2-TLcorr(TL < 2)) .^", "value to the day of year value. # This is approximate and could", "et. al., Vol. 73, pp. 307-317 (2002). It is slightly different # than", "1, 2160)) .astype(np.int64)) longitude_index = ( np.around(_linearly_scale(longitude, -180, 180, 1, 4320)) .astype(np.int64)) g", "and 7000 m, or 101325 and 41000 Pascals. 
dni_extra: numeric Extraterrestrial irradiance. The", "- 0.0172 b0 = -0.7565*aod700**2 + 0.5057*aod700 + 0.4557 b = b1 *", "accomplished by judicious use and placement of np.maximum, # np.minimum, and np.fmax #", "# Then we map the month value to the day of year value.", "for values between 0.2 and 10 cm. Values less than 0.2 will be", "input) or OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. See", "+ 0.5057*aod700 + 0.4557 b = b1 * np.log(w) + b0 return b", "= pd.Series(np.interp(time.dayofyear, days, g2), index=time) else: linke_turbidity = pd.DataFrame(time.month, index=time) # apply monthly", "inputmax, outputmin, outputmax): \"\"\" used by linke turbidity lookup function \"\"\" inputrange =", "sky model for global horizontal irradiance (GHI), direct normal irradiance (DNI), and calculates", "values between 0.2 and 10 cm. Values less than 0.2 will be assumed", "# latitudes from 90 to -90 degrees; the columns represent global longitudes #", "+ 0.0061*np.log(w)**2 tg0 = 0.27 + 0.043*np.log(w) + 0.0090*np.log(w)**2 tgp = 0.0079*w +", "and DHI according to the simplified Solis model [1]_. Reference [1]_ describes the", "0.092*w - 8.86, 0.0554*w - 5.71 td0 = 0.0042*w + 3.12, 0.0057*w +", "pressure (Pascals). Algorithm derived for altitudes between sea level and 7000 m, or", "Parameters ---------- time : pandas.DatetimeIndex latitude : float longitude : float filepath :", "* np.fmax(ghi, 0) # BncI = \"normal beam clear sky radiation\" b =", "Ineichen and Perez clear sky model for global horizontal irradiance (GHI), direct normal", "to Cloudiness and Cloud Density,\" Journal of Meteorology, vol. 2, pp. 154-166, 1945.", "global horizontal irradiance (GHI) as presented in [1, 2]. A report on clear", "clear sky GHI, DNI, and DHI according to the simplified Solis model [1]_.", "Energy 73, pg 156. We used the # equation from pg 311 because", "taken in the interpretation of this result! 
Parameters ---------- apparent_zenith : Series The", "= ghi - dni*cos_zenith irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni", "the taud coefficient.\"\"\" # isscalar tests needed to ensure that the arrays will", "td2 = -0.23*w + 74.8, -0.134*w + 15.5 td1 = 0.092*w - 8.86,", "* w**0.032 i02 = 0.12 * w**0.56 i0p = i0 * (i02*aod700**2 +", "isscalar tests needed to ensure that the arrays will have the # right", "beam/GHI models. # Create the corrected TL for TL < 2 # TLcorr", "20 * Linke Turbidity, # so divide the number from the file by", "to the day of year value. # This is approximate and could be", "W/m^2 provided by the Haurwitz clear-sky model. Initial implementation of this algorithm by", "SAND2012-2389, 2012. ''' cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi = 1098.0 * cos_zenith * np.exp(-0.059/cos_zenith)", "tl nans ghi = cg1 * dni_extra * cos_zenith * tl / tl", "tbp = 0.0089*w + 0.13 taub = tb1*aod700 + tb0 + tbp*np.log(p/p0) return", "x: g[x[0]-1], axis=1) linke_turbidity /= 20. return linke_turbidity def haurwitz(apparent_zenith): ''' Determine clear", "factor in eqn 9 and appendix A, and 3) Global horizontal model in", "= 5.09e-05 * altitude + 0.868 cg2 = 3.92e-05 * altitude + 0.0387", "is likely to # have NaNs that we'll want to map to 0s", "dtype=np.int) # create tuples of coefficients for # aod700 < 0.05, aod700 >=", "to correct the beam/GHI models, and the # second correction is used to", "= -0.23*w + 74.8, -0.134*w + 15.5 td1 = 0.092*w - 8.86, 0.0554*w", "# is accomplished by judicious use and placement of np.maximum, # np.minimum, and", "b = b1 * np.log(w) + b0 return b def _calc_taug(w, aod700, p):", "31. # Then we map the month value to the day of year", "the output. This # is accomplished by judicious use and placement of np.maximum,", "+ 15.5 td1 = 0.092*w - 8.86, 0.0554*w - 5.71 td0 = 0.0042*w", "SAND2012-2389, 2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17, 2012). [5] <NAME>, et. 
al., \"Worldwide", "January (1) to December (12). To determine the Linke # turbidity for a", "Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. ''' cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi = 1098.0", "degrees. airmass_absolute: numeric Pressure corrected airmass. linke_turbidity: numeric Linke Turbidity. altitude: numeric Altitude", "# want NaNs in other inputs to propagate through to the output. This", "zenith angle [3]. Extreme care should be taken in the interpretation of this", "shape in the tds calculation. # there's probably a better way to do", "1.24 + 0.047*np.log(w) + 0.0061*np.log(w)**2 tg0 = 0.27 + 0.043*np.log(w) + 0.0090*np.log(w)**2 tgp", "pg 156 publication (notably the fh2-(TL-1) should be fh2 * # (TL-1)). #", "and series inputs sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni = i0p * np.exp(-taub/sin_elev**b) ghi", "equation given in Solar Energy 73, pg 156. We used the # equation", "\"\"\"Calculate the g coefficient.\"\"\" g = -0.0147*np.log(w) - 0.3079*aod700**2 + 0.2846*aod700 + 0.3798", "See also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References ---------- [1] <NAME> and <NAME>, \"A New", "comparisons with other clear sky models. Parameters ---------- apparent_elevation: numeric The apparent elevation", "``dni_extra`` determine the units of the output. Returns ------- clearsky : DataFrame (if", "the matrix are 20 * Linke Turbidity, # so divide the number from", "TLcorr = TL; # TLcorr(TL < 2) = TLcorr(TL < 2) - 0.25", "irradiance in W/m^2 provided by the Haurwitz clear-sky model. Initial implementation of this", "turbidities.') if filepath is None: pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat')", "# turbidity. 
try: import scipy.io except ImportError: raise ImportError('The Linke turbidity lookup table", "next Jan # to the array so that the interpolation will work for", "20 to get the # turbidity. try: import scipy.io except ImportError: raise ImportError('The", "_calc_taug(w, aod700, p): \"\"\"Calculate the taug coefficient\"\"\" p0 = 101325. tg1 = 1.24", "86*w - 13800, -0.21*w + 11.6 td3 = -3.11*w + 79.4, 0.27*w -", "Ineichen and Perez introduce (among other # things) three things. 1) Beam model", "tl * np.fmax(ghi, 0) # BncI = \"normal beam clear sky radiation\" b", "w.copy() w[w < 0.2] = 0.2 # this algorithm is reasonably fast already,", "have NaNs that we'll want to map to 0s in the output. However,", "= 0.12 * w**0.56 i0p = i0 * (i02*aod700**2 + i01*aod700 + io0", "surface for a given month do the # following: LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex,", "month. # This means that we need to add previous Dec and next", "of the atmosphere (cm). Algorithm derived for values between 0.2 and 10 cm.", "A report on clear sky models found the Ineichen/Perez model to have excellent", "Extraterrestrial irradiance. The units of ``dni_extra`` determine the units of the output. Returns", "separate corrections, the # first correction is used to correct the beam/GHI models,", "numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine the units of the output.", "direct normal irradiance (DNI), and calculates the clear-sky diffuse horizontal (DHI) component as", "angle [3]. Extreme care should be taken in the interpretation of this result!", "provides comparisons with other clear sky models. Parameters ---------- apparent_elevation: numeric The apparent", "performance of models which require only zenith angle [3]. 
Extreme care should be", "* outputrange/inputrange + outputmin return outputmatrix def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\"", "# the year from January (1) to December (12). To determine the Linke", "months of # the year from January (1) to December (12). To determine", "can still use clearsky.ineichen if you ' + 'supply your own turbidities.') if", "horizontal model in # eqn. 11. They do NOT appear to use the", "in degrees. Returns ------- pd.Series The modeled global horizonal irradiance in W/m^2 provided", "(third dimension) represents months of # the year from January (1) to December", "np.fmax(ghi, 0) # BncI = \"normal beam clear sky radiation\" b = 0.664", "cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0] = 0 df_out = pd.DataFrame({'ghi': clearsky_ghi}) return", "5]. Parameters ----------- apparent_zenith: numeric Refraction corrected solar zenith angle in degrees. airmass_absolute:", "Data covers 1 year. # Assume that data corresponds to the value at", "function \"\"\" inputrange = inputmax - inputmin outputrange = outputmax - outputmin outputmatrix", "= np.linspace(-15, 380, num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2), index=time) else: linke_turbidity =", "report on clear sky models found the Ineichen/Perez model to have excellent performance", "solar irradiance,\" Solar Energy, 132, 332-344 (2016). \"\"\" p = pressure w =", "[3]. Extreme care should be taken in the interpretation of this result! Parameters", "-------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References ---------- [1] <NAME> and <NAME>, \"A New airmass independent", "less than 0.2 will be assumed to be equal to 0.2. pressure: numeric", "al., \"Worldwide Linke Turbidity Information\", Proc. ISES Solar World Congress, June 2003. Goteborg,", "# The NaN handling is a little subtle. 
The AM input is likely", "pvlib import tools def ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.): ''' Determine clear sky", "the beam/GHI models, and the # second correction is used to correct the", "radiation\" b = 0.664 + 0.163/fh1 bnci = b * np.exp(-0.09 * airmass_absolute", "156. We used the # equation from pg 311 because of the existence", "pressure=101325., dni_extra=1364.): \"\"\" Calculate the clear sky GHI, DNI, and DHI according to", "3.12, 0.0057*w + 2.94 tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds = (np.array([td0, td1, td2,", "_calc_g(w, aod700): \"\"\"Calculate the g coefficient.\"\"\" g = -0.0147*np.log(w) - 0.3079*aod700**2 + 0.2846*aod700", "solar zenith angle in degrees. airmass_absolute: numeric Pressure corrected airmass. linke_turbidity: numeric Linke", "if w < 0.2 else w else: w = w.copy() w[w < 0.2]", "0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) / cos_zenith) bnci_2 = ghi * np.fmin(np.fmax(bnci_2, 0), 1e20) dni", "aerosol optical depth at 700 nm (unitless). Algorithm derived for values between 0", "use max so that nighttime values will result in 0s instead of #", "if np.isscalar(w) and np.isscalar(aod700): w = np.array([w]) aod700 = np.array([aod700]) elif np.isscalar(w): w", "73, pp. 307-317 (2002). It is slightly different # than the equation given", "elevation of the sun above the horizon (deg). aod700: numeric The aerosol optical", "101325. tg1 = 1.24 + 0.047*np.log(w) + 0.0061*np.log(w)**2 tg0 = 0.27 + 0.043*np.log(w)", "pw < 0.2 if np.isscalar(w): w = 0.2 if w < 0.2 else", "days = np.linspace(-15, 380, num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2), index=time) else: linke_turbidity", "+ 152*aod700) d = -0.337*aod700**2 + 0.63*aod700 + 0.116 + dp*np.log(p/p0) return d", "\"\"\"Calculate the d coefficient.\"\"\" p0 = 101325. dp = 1/(18 + 152*aod700) d", ": Series The apparent (refraction corrected) sun zenith angle in degrees. 
Returns -------", "dni_extra * cos_zenith * tl / tl * np.fmax(ghi, 0) # BncI =", "1) Beam model in eqn. 8, 2) new turbidity # factor in eqn", "312. bnci_2 = ((1 - (0.1 - 0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) / cos_zenith) bnci_2", "to 0s in the output. However, we # want NaNs in other inputs", "from pvlib import tools def ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.): ''' Determine clear", "tds calculation. # there's probably a better way to do this. if np.isscalar(w)", "interpolation will work for # Jan 1 - Jan 15 and Dec 16", "a little subtle. The AM input is likely to # have NaNs that", "\"\"\" from __future__ import division import os from collections import OrderedDict import numpy", "123-124, 1946. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear Sky Models:", "pp. 307-317, 2002. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear Sky", "DHI according to the simplified Solis model [1]_. Reference [1]_ describes the accuracy", "Linke turbidity coefficient\", Solar Energy, vol 73, pp. 151-157, 2002. [2] <NAME> et.", "turbidity lookup function \"\"\" inputrange = inputmax - inputmin outputrange = outputmax -", "os from collections import OrderedDict import numpy as np import pandas as pd", "Solis model [1]_. Reference [1]_ describes the accuracy of the model as being", "[1] <NAME>, \"Insolation in Relation to Cloudiness and Cloud Density,\" Journal of Meteorology,", "or 101325 and 41000 Pascals. dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra``", "altitude + 0.868 cg2 = 3.92e-05 * altitude + 0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1", "* cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0] = 0 df_out = pd.DataFrame({'ghi': clearsky_ghi})", "td1, td2, td3, td4, tdp]) * aod700_mask).sum(axis=1) p0 = 101325. 
taud = (tds[4]*aod700**4", "create tuples of coefficients for # aod700 < 0.05, aod700 >= 0.05 td4", "+ 0.882/fh1)) / cos_zenith) bnci_2 = ghi * np.fmin(np.fmax(bnci_2, 0), 1e20) dni =", "g2 = np.concatenate([[g[-1]], g, [g[0]]]) days = np.linspace(-15, 380, num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear,", "model for global horizontal irradiance (GHI) as presented in [1, 2]. A report", "+ tds[1]*aod700 + tds[0] + tds[5]*np.log(p/p0)) # be polite about matching the output", "the columns represent global longitudes # from -180 to 180; and the depth", "dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def _calc_i0p(i0, w, aod700,", "tds[2]*aod700**2 + tds[1]*aod700 + tds[0] + tds[5]*np.log(p/p0)) # be polite about matching the", "cg1 = 5.09e-05 * altitude + 0.868 cg2 = 3.92e-05 * altitude +", "0 df_out = pd.DataFrame({'ghi': clearsky_ghi}) return df_out def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax):", "inputmin, inputmax, outputmin, outputmax): \"\"\" used by linke turbidity lookup function \"\"\" inputrange", "that the numbers within the matrix are 20 * Linke Turbidity, # so", "# create tuples of coefficients for # aod700 < 0.05, aod700 >= 0.05", "\"\"\" p = pressure w = precipitable_water # algorithm fails for pw <", "to 180; and the depth (third dimension) represents months of # the year", "atmospheric pressure (Pascals). Algorithm derived for altitudes between sea level and 7000 m,", "'dni', 'ghi'``. References ---------- .. [1] <NAME>, \"A broadband simplified version of the", "by judicious use and placement of np.maximum, # np.minimum, and np.fmax # use", "154-166, 1945. 
[2] <NAME>, \"Insolation in Relation to Cloud Type,\" Journal of Meteorology,", "(DNI), and calculates the clear-sky diffuse horizontal (DHI) component as the difference between", "- (0.1 - 0.2*np.exp(-tl))/(0.1 + 0.882/fh1)) / cos_zenith) bnci_2 = ghi * np.fmin(np.fmax(bnci_2,", "coefficients for # aod700 < 0.05, aod700 >= 0.05 td4 = 86*w -", "creation of nans at night instead of 0s # it's also friendly to", "Solis clear sky model,\" Solar Energy, 82, 758-762 (2008). .. [2] <NAME>, \"Validation", "= tools.cosd(apparent_zenith) clearsky_ghi = 1098.0 * cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi < 0] =", "is used to correct the revised turibidity # factor. In my estimation, there", "= linke_turbidity.apply(lambda x: g[x[0]-1], axis=1) linke_turbidity /= 20. return linke_turbidity def haurwitz(apparent_zenith): '''", "result! Parameters ---------- apparent_zenith : Series The apparent (refraction corrected) sun zenith angle", "if np.isscalar(w): w = 0.2 if w < 0.2 else w else: w", "aod700, p) taub = _calc_taub(w, aod700, p) b = _calc_b(w, aod700) taug =", "vol 73, pp. 307-317, 2002. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance", "powers of aod700, the log(p/p0), and # the log(w) instead of repeating the", "DNI, and DHI from Ineichen/Perez model. Implements the Ineichen and Perez clear sky", "we # want NaNs in other inputs to propagate through to the output.", "should be fh2 * # (TL-1)). # The NaN handling is a little", "latitude_index = ( np.around(_linearly_scale(latitude, 90, -90, 1, 2160)) .astype(np.int64)) longitude_index = ( np.around(_linearly_scale(longitude,", "= 0.27 + 0.043*np.log(w) + 0.0090*np.log(w)**2 tgp = 0.0079*w + 0.1 taug =", "assumed to be equal to 0.2. pressure: numeric The atmospheric pressure (Pascals). Algorithm", "to daily values. 
Returns ------- turbidity : Series \"\"\" # The .mat file", "+ i01*aod700 + io0 + 0.071*np.log(p/p0)) return i0p def _calc_taub(w, aod700, p): \"\"\"Calculate", "estimate the clear sky global and beam solar irradiance,\" Solar Energy, 132, 332-344", "independent formulation for the Linke turbidity coefficient\", Solar Energy, vol 73, pp. 151-157,", "< 0.2] = 0.2 # this algorithm is reasonably fast already, but it", "judicious use and placement of np.maximum, # np.minimum, and np.fmax # use max", "<NAME>. References ---------- [1] <NAME>, \"Insolation in Relation to Cloudiness and Cloud Density,\"", "- 5.71 td0 = 0.0042*w + 3.12, 0.0057*w + 2.94 tdp = -0.83*(1+aod700)**(-17.2),", "fh2 * # (TL-1)). # The NaN handling is a little subtle. The", "found in ``LinkeTurbidities.mat`` to daily values. Returns ------- turbidity : Series \"\"\" #", "of nans at night instead of 0s # it's also friendly to scalar", "= np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.) cg1 = 5.09e-05 * altitude + 0.868 cg2", "the number from the file by 20 to get the # turbidity. try:", "the numbers within the matrix are 20 * Linke Turbidity, # so divide", "horizontal (DHI) component as the difference between GHI and DNI*cos(zenith) as presented in", "apparent_zenith : Series The apparent (refraction corrected) sun zenith angle in degrees. Returns", "( np.around(_linearly_scale(latitude, 90, -90, 1, 2160)) .astype(np.int64)) longitude_index = ( np.around(_linearly_scale(longitude, -180, 180,", "appendix A seems as if there are two separate corrections, the # first", "= (inputmatrix-inputmin) * outputrange/inputrange + outputmin return outputmatrix def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325.,", "SoDa [4, 5]. Parameters ----------- apparent_zenith: numeric Refraction corrected solar zenith angle in", "in meters. dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine the units", "be made more accurate. 
g2 = np.concatenate([[g[-1]], g, [g[0]]]) days = np.linspace(-15, 380,", "linke_turbidity_table[latitude_index][longitude_index] if interp_turbidity: # Data covers 1 year. # Assume that data corresponds", "The AM input is likely to # have NaNs that we'll want to", ": float filepath : string The path to the ``.mat`` file. interp_turbidity :", "= cg1 * dni_extra * cos_zenith * tl / tl * np.fmax(ghi, 0)", "the clear-sky diffuse horizontal (DHI) component as the difference between GHI and DNI*cos(zenith)", "= i0p * np.exp(-taug/sin_elev**g) * sin_elev dhi = i0p * np.exp(-taud/sin_elev**d) irrads =", "the output. Returns ------- clearsky : DataFrame (if Series input) or OrderedDict of", "0.2 if np.isscalar(w): w = 0.2 if w < 0.2 else w else:", "np.array([aod700]) elif np.isscalar(w): w = np.full_like(aod700, w) elif np.isscalar(aod700): aod700 = np.full_like(w, aod700)", "79.4, 0.27*w - 20.7 td2 = -0.23*w + 74.8, -0.134*w + 15.5 td1", "for global horizontal irradiance (GHI) as presented in [1, 2]. A report on", "outputmin return outputmatrix def simplified_solis(apparent_elevation, aod700=0.1, precipitable_water=1., pressure=101325., dni_extra=1364.): \"\"\" Calculate the clear", "components. Reference [2]_ provides comparisons with other clear sky models. Parameters ---------- apparent_elevation:", "Then we map the month value to the day of year value. #", "is accomplished by judicious use and placement of np.maximum, # np.minimum, and np.fmax", "2160 x 4320 x 12 # matrix of type uint8 called 'LinkeTurbidity'. 
The", "i0p = _calc_i0p(dni_extra, w, aod700, p) taub = _calc_taub(w, aod700, p) b =", "''' cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi = 1098.0 * cos_zenith * np.exp(-0.059/cos_zenith) clearsky_ghi[clearsky_ghi <", "type(s) if len(taud) == 1: taud = taud[0] return taud def _calc_d(w, aod700,", "-0.0147*np.log(w) - 0.3079*aod700**2 + 0.2846*aod700 + 0.3798 return g def _calc_taud(w, aod700, p):", "correct the # turbidity factor used in the beam/GHI models. # Create the", "~aod700_mask], dtype=np.int) # create tuples of coefficients for # aod700 < 0.05, aod700", "Reference [2]_ provides comparisons with other clear sky models. Parameters ---------- apparent_elevation: numeric", "interp_turbidity : bool If ``True``, interpolates the monthly Linke turbidity values found in", "Parameters ----------- apparent_zenith: numeric Refraction corrected solar zenith angle in degrees. airmass_absolute: numeric", "157 & SE 73, 312. bnci_2 = ((1 - (0.1 - 0.2*np.exp(-tl))/(0.1 +", "Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July", "----------- apparent_zenith: numeric Refraction corrected solar zenith angle in degrees. airmass_absolute: numeric Pressure", "x 4320 x 12 # matrix of type uint8 called 'LinkeTurbidity'. The rows", "other # things) three things. 1) Beam model in eqn. 8, 2) new", "the monthly Linke turbidity values found in ``LinkeTurbidities.mat`` to daily values. Returns -------", "= np.full_like(aod700, w) elif np.isscalar(aod700): aod700 = np.full_like(w, aod700) aod700_mask = aod700 <", "the # first correction is used to correct the beam/GHI models, and the", "and diffuse components. Reference [2]_ provides comparisons with other clear sky models. Parameters", "of repeating the calculations as needed in each # function i0p = _calc_i0p(dni_extra,", "Full ref: Perez # et. al., Vol. 73, pp. 307-317 (2002). 
It is", "of the existence of known typos in # the pg 156 publication (notably", "_calc_b(w, aod700): \"\"\"Calculate the b coefficient.\"\"\" b1 = 0.00925*aod700**2 + 0.0148*aod700 - 0.0172", "turbidity for a position on the Earth's surface for a given month do", "simplified version of the Solis clear sky model,\" Solar Energy, 82, 758-762 (2008).", "else w else: w = w.copy() w[w < 0.2] = 0.2 # this", "turibidity # factor. In my estimation, there is no need to correct the", "# algorithm fails for pw < 0.2 if np.isscalar(w): w = 0.2 if", "note on the TL correction: By my reading of the publication # on", "< 2)) .^ (0.5); # This equation is found in Solar Energy 73,", "fh2 = np.exp(-altitude/1250.) cg1 = 5.09e-05 * altitude + 0.868 cg2 = 3.92e-05", "b = _calc_b(w, aod700) taug = _calc_taug(w, aod700, p) g = _calc_g(w, aod700)", "* np.exp(0.01*airmass_absolute**1.8)) # use fmax to map airmass nans to 0s. multiply and", "g2), index=time) else: linke_turbidity = pd.DataFrame(time.month, index=time) # apply monthly data linke_turbidity =", "to propagate through to the output. This # is accomplished by judicious use", "p) # this prevents the creation of nans at night instead of 0s", "interpolates the monthly Linke turbidity values found in ``LinkeTurbidities.mat`` to daily values. Returns", "Energy, vol 73, pp. 151-157, 2002. [2] <NAME> et. al., \"A New Operational", "pp. 154-166, 1945. 
[2] <NAME>, \"Insolation in Relation to Cloud Type,\" Journal of", "coefficient.\"\"\" # isscalar tests needed to ensure that the arrays will have the", "0.05, aod700 >= 0.05 td4 = 86*w - 13800, -0.21*w + 11.6 td3", "tds[1]*aod700 + tds[0] + tds[5]*np.log(p/p0)) # be polite about matching the output type", "'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity'] latitude_index = ( np.around(_linearly_scale(latitude, 90, -90,", "array so that the interpolation will work for # Jan 1 - Jan", "Sky Models: Implementation and Analysis\", Sandia National Laboratories, SAND2012-2389, 2012. ''' cos_zenith =", "num=14) linke_turbidity = pd.Series(np.interp(time.dayofyear, days, g2), index=time) else: linke_turbidity = pd.DataFrame(time.month, index=time) #", "= -0.7565*aod700**2 + 0.5057*aod700 + 0.4557 b = b1 * np.log(w) + b0", "corrections, the # first correction is used to correct the beam/GHI models, and", "things. 1) Beam model in eqn. 8, 2) new turbidity # factor in", "dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine the units of the", "scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity'] latitude_index = ( np.around(_linearly_scale(latitude, 90, -90, 1, 2160)) .astype(np.int64))", "DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. See also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References", "methods to calculate clear sky GHI, DNI, and DHI. \"\"\" from __future__ import", "will be assumed to be equal to 0.2. pressure: numeric The atmospheric pressure", "Turbidity. altitude: numeric Altitude above sea level in meters. dni_extra: numeric Extraterrestrial irradiance.", "values. Returns ------- turbidity : Series \"\"\" # The .mat file 'LinkeTurbidities.mat' contains", "the Linke turbidity coefficient\", Solar Energy, vol 73, pp. 151-157, 2002. 
[2] <NAME>", "They do NOT appear to use the new turbidity factor (item # 2", "TL correction: By my reading of the publication # on pages 151-157, Ineichen", "with other clear sky models. Parameters ---------- apparent_elevation: numeric The apparent elevation of", "little subtle. The AM input is likely to # have NaNs that we'll", "in Relation to Cloudiness and Cloud Density,\" Journal of Meteorology, vol. 2, pp.", "Relation to Cloud Type,\" Journal of Meteorology, vol. 3, pp. 123-124, 1946. [3]", "ISES Solar World Congress, June 2003. Goteborg, Sweden. ''' # Dan's note on", "type uint8 called 'LinkeTurbidity'. The rows represent global # latitudes from 90 to", "References ---------- [1] <NAME>, \"Insolation in Relation to Cloudiness and Cloud Density,\" Journal", "= i0p * np.exp(-taud/sin_elev**d) irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni", "Returns ------- turbidity : Series \"\"\" # The .mat file 'LinkeTurbidities.mat' contains a", "12 # matrix of type uint8 called 'LinkeTurbidity'. The rows represent global #", "model [1]_. Reference [1]_ describes the accuracy of the model as being 15,", "the Earth's surface for a given month do the # following: LT =", "+ tds[3]*aod700**3 + tds[2]*aod700**2 + tds[1]*aod700 + tds[0] + tds[5]*np.log(p/p0)) # be polite", "90, -90, 1, 2160)) .astype(np.int64)) longitude_index = ( np.around(_linearly_scale(longitude, -180, 180, 1, 4320))", "needed to ensure that the arrays will have the # right shape in", "dni irrads['dhi'] = dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def", "determine the Linke # turbidity for a position on the Earth's surface for", "0.2 and 10 cm. 
Values less than 0.2 will be assumed to be", ": DataFrame (if Series input) or OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys", "# faster by precalculating the powers of aod700, the log(p/p0), and # the", "# this prevents the creation of nans at night instead of 0s #", "0.05 td4 = 86*w - 13800, -0.21*w + 11.6 td3 = -3.11*w +", "np.around(_linearly_scale(latitude, 90, -90, 1, 2160)) .astype(np.int64)) longitude_index = ( np.around(_linearly_scale(longitude, -180, 180, 1,", "# Note that the numbers within the matrix are 20 * Linke Turbidity,", "in Solar Energy 73, pg 156. We used the # equation from pg", "< 0.2 else w else: w = w.copy() w[w < 0.2] = 0.2", "* np.log(w) + b0 return b def _calc_taug(w, aod700, p): \"\"\"Calculate the taug", "Assume that data corresponds to the value at # the middle of each", "the month value to the day of year value. # This is approximate", "linke_turbidity = linke_turbidity.apply(lambda x: g[x[0]-1], axis=1) linke_turbidity /= 20. return linke_turbidity def haurwitz(apparent_zenith):", "Haurwitz model to have the best performance of models which require only zenith", "and 41000 Pascals. dni_extra: numeric Extraterrestrial irradiance. The units of ``dni_extra`` determine the", "Jan 15 and Dec 16 - Dec 31. # Then we map the", "= 0.0079*w + 0.1 taug = tg1*aod700 + tg0 + tgp*np.log(p/p0) return taug", "instead of repeating the calculations as needed in each # function i0p =", "in Solar Energy 73, pg 311. Full ref: Perez # et. al., Vol.", "excellent performance with a minimal input data set [3]. Default values for monthly", "isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def _calc_i0p(i0, w, aod700, p): \"\"\"Calculate", "at 700 nm (unitless). Algorithm derived for values between 0 and 0.45. precipitable_water:", "for monthly Linke turbidity provided by SoDa [4, 5]. 
Parameters ----------- apparent_zenith: numeric", "= 0.092*w - 8.86, 0.0554*w - 5.71 td0 = 0.0042*w + 3.12, 0.0057*w", "= pressure w = precipitable_water # algorithm fails for pw < 0.2 if", "apparent_elevation: numeric The apparent elevation of the sun above the horizon (deg). aod700:", "longitudes # from -180 to 180; and the depth (third dimension) represents months", "for TL < 2 # TLcorr = TL; # TLcorr(TL < 2) =", "new turbidity # factor in eqn 9 and appendix A, and 3) Global", "factor used in the beam/GHI models. # Create the corrected TL for TL", "_calc_i0p(i0, w, aod700, p): \"\"\"Calculate the \"enhanced extraterrestrial irradiance\".\"\"\" p0 = 101325. io0", "Sandia National Laboratories, SAND2012-2389, 2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17, 2012). [5] <NAME>,", "\"\"\"Calculate the \"enhanced extraterrestrial irradiance\".\"\"\" p0 = 101325. io0 = 1.08 * w**0.0051", "taub = _calc_taub(w, aod700, p) b = _calc_b(w, aod700) taug = _calc_taug(w, aod700,", "15.5 td1 = 0.092*w - 8.86, 0.0554*w - 5.71 td0 = 0.0042*w +", "turbidity provided by SoDa [4, 5]. Parameters ----------- apparent_zenith: numeric Refraction corrected solar", "p0 = 101325. tb1 = 1.82 + 0.056*np.log(w) + 0.0071*np.log(w)**2 tb0 = 0.33", "night instead of 0s # it's also friendly to scalar and series inputs", "degrees. Returns ------- pd.Series The modeled global horizonal irradiance in W/m^2 provided by", "different # than the equation given in Solar Energy 73, pg 156. We", "filepath : string The path to the ``.mat`` file. interp_turbidity : bool If", "faster by precalculating the powers of aod700, the log(p/p0), and # the log(w)", "GHI, DNI, and DHI. \"\"\" from __future__ import division import os from collections", "= pd.DataFrame(time.month, index=time) # apply monthly data linke_turbidity = linke_turbidity.apply(lambda x: g[x[0]-1], axis=1)", "the ``LinkeTurbidities.mat`` data file supplied with pvlib. 
Parameters ---------- time : pandas.DatetimeIndex latitude", "divide the number from the file by 20 to get the # turbidity.", "in 0s instead of # negatives. propagates nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0) tl", "and 10 cm. Values less than 0.2 will be assumed to be equal", "i0p * np.exp(-taud/sin_elev**d) irrads = OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi']", "< 0] = 0 df_out = pd.DataFrame({'ghi': clearsky_ghi}) return df_out def _linearly_scale(inputmatrix, inputmin,", "better way to do this. if np.isscalar(w) and np.isscalar(aod700): w = np.array([w]) aod700", "vol 73, pp. 151-157, 2002. [2] <NAME> et. al., \"A New Operational Model", "from __future__ import division import os from collections import OrderedDict import numpy as", "and Validation\", Solar Energy, vol 73, pp. 307-317, 2002. [3] <NAME>, <NAME>, and", "revised turibidity # factor. In my estimation, there is no need to correct", "= OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi'] = dhi if isinstance(dni,", "sky models. Parameters ---------- apparent_elevation: numeric The apparent elevation of the sun above", "contains a single 2160 x 4320 x 12 # matrix of type uint8", "---------- .. [1] <NAME>, \"A broadband simplified version of the Solis clear sky", "+ 2.94 tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds = (np.array([td0, td1, td2, td3, td4,", "g = -0.0147*np.log(w) - 0.3079*aod700**2 + 0.2846*aod700 + 0.3798 return g def _calc_taud(w,", "Note that the numbers within the matrix are 20 * Linke Turbidity, #", "else: linke_turbidity = pd.DataFrame(time.month, index=time) # apply monthly data linke_turbidity = linke_turbidity.apply(lambda x:", "None: pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath) linke_turbidity_table", "monthly Linke turbidity provided by SoDa [4, 5]. 
Parameters ----------- apparent_zenith: numeric Refraction", "from pg 311 because of the existence of known typos in # the", "provided by the Haurwitz clear-sky model. Initial implementation of this algorithm by <NAME>.", "output. Returns ------- clearsky : DataFrame (if Series input) or OrderedDict of arrays", "'LinkeTurbidities.mat' contains a single 2160 x 4320 x 12 # matrix of type", "sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni = i0p * np.exp(-taub/sin_elev**b) ghi = i0p *", "2012. [4] http://www.soda-is.com/eng/services/climat_free_eng.php#c5 (obtained July 17, 2012). [5] <NAME>, et. al., \"Worldwide Linke", "# have NaNs that we'll want to map to 0s in the output.", "report on clear sky models found the Haurwitz model to have the best", "d coefficient.\"\"\" p0 = 101325. dp = 1/(18 + 152*aod700) d = -0.337*aod700**2", "clear sky GHI, DNI, and DHI. \"\"\" from __future__ import division import os", "result in 0s instead of # negatives. propagates nans. cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0)", "* altitude + 0.868 cg2 = 3.92e-05 * altitude + 0.0387 ghi =", "tds[3]*aod700**3 + tds[2]*aod700**2 + tds[1]*aod700 + tds[0] + tds[5]*np.log(p/p0)) # be polite about", "irrads['dhi'] = dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def lookup_linke_turbidity(time,", "= 101325. 
io0 = 1.08 * w**0.0051 i01 = 0.97 * w**0.032 i02", "and calculates the clear-sky diffuse horizontal (DHI) component as the difference between GHI", "is None: pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath)", "(DHI) component as the difference between GHI and DNI*cos(zenith) as presented in [1,", "-0.7565*aod700**2 + 0.5057*aod700 + 0.4557 b = b1 * np.log(w) + b0 return", "depth (third dimension) represents months of # the year from January (1) to", "linke_turbidity_table = mat['LinkeTurbidity'] latitude_index = ( np.around(_linearly_scale(latitude, 90, -90, 1, 2160)) .astype(np.int64)) longitude_index", "care should be taken in the interpretation of this result! Parameters ---------- apparent_zenith", "columns represent global longitudes # from -180 to 180; and the depth (third", "np.exp(0.01*airmass_absolute**1.8)) # use fmax to map airmass nans to 0s. multiply and divide", "to map to 0s in the output. However, we # want NaNs in", "NaNs that we'll want to map to 0s in the output. However, we", "to ensure that the arrays will have the # right shape in the", "= np.array([aod700_mask, ~aod700_mask], dtype=np.int) # create tuples of coefficients for # aod700 <", "the year from January (1) to December (12). To determine the Linke #", "value. # This is approximate and could be made more accurate. g2 =", "0.4557 b = b1 * np.log(w) + b0 return b def _calc_taug(w, aod700,", "and <NAME>, \"A New airmass independent formulation for the Linke turbidity coefficient\", Solar", "diffuse horizontal (DHI) component as the difference between GHI and DNI*cos(zenith) as presented", "and DHI. \"\"\" from __future__ import division import os from collections import OrderedDict", "if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def _calc_i0p(i0, w, aod700, p):", "map airmass nans to 0s. 
multiply and divide by tl to # reinsert", "Sandia National Laboratories, SAND2012-2389, 2012. ''' cos_zenith = tools.cosd(apparent_zenith) clearsky_ghi = 1098.0 *", "which require only zenith angle [3]. Extreme care should be taken in the", "is found in Solar Energy 73, pg 311. Full ref: Perez # et.", "+ 0.047*np.log(w) + 0.0061*np.log(w)**2 tg0 = 0.27 + 0.043*np.log(w) + 0.0090*np.log(w)**2 tgp =", "month value to the day of year value. # This is approximate and", "scipy. ' + 'You can still use clearsky.ineichen if you ' + 'supply", "publication (notably the fh2-(TL-1) should be fh2 * # (TL-1)). # The NaN", "The .mat file 'LinkeTurbidities.mat' contains a single 2160 x 4320 x 12 #", "# so divide the number from the file by 20 to get the", "day of year value. # This is approximate and could be made more", "to be equal to 0.2. pressure: numeric The atmospheric pressure (Pascals). Algorithm derived", "np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.) cg1 = 5.09e-05 * altitude + 0.868 cg2 =", "\"\"\" The ``clearsky`` module contains several methods to calculate clear sky GHI, DNI,", "of the Solis clear sky model,\" Solar Energy, 82, 758-762 (2008). .. [2]", "and Cloud Density,\" Journal of Meteorology, vol. 2, pp. 154-166, 1945. [2] <NAME>,", "io0 = 1.08 * w**0.0051 i01 = 0.97 * w**0.032 i02 = 0.12", "= 0.00925*aod700**2 + 0.0148*aod700 - 0.0172 b0 = -0.7565*aod700**2 + 0.5057*aod700 + 0.4557", "tl = linke_turbidity fh1 = np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.) cg1 = 5.09e-05 *", "a single 2160 x 4320 x 12 # matrix of type uint8 called", "al., Vol. 73, pp. 307-317 (2002). It is slightly different # than the", "aod700_mask).sum(axis=1) p0 = 101325. taud = (tds[4]*aod700**4 + tds[3]*aod700**3 + tds[2]*aod700**2 + tds[1]*aod700", "taud = (tds[4]*aod700**4 + tds[3]*aod700**3 + tds[2]*aod700**2 + tds[1]*aod700 + tds[0] + tds[5]*np.log(p/p0))", "Refraction corrected solar zenith angle in degrees. airmass_absolute: numeric Pressure corrected airmass. 
linke_turbidity:", "# Dan's note on the TL correction: By my reading of the publication", "0s. multiply and divide by tl to # reinsert tl nans ghi =", "* (tl - 1)) bnci = dni_extra * np.fmax(bnci, 0) # \"empirical correction\"", "sky model,\" Solar Energy, 82, 758-762 (2008). .. [2] <NAME>, \"Validation of models", "---------- time : pandas.DatetimeIndex latitude : float longitude : float filepath : string", "_calc_taud(w, aod700, p) d = _calc_d(w, aod700, p) # this prevents the creation", "calculation. # there's probably a better way to do this. if np.isscalar(w) and", "New Operational Model for Satellite-Derived Irradiances: Description and Validation\", Solar Energy, vol 73,", "first correction is used to correct the beam/GHI models, and the # second", "above the horizon (deg). aod700: numeric The aerosol optical depth at 700 nm", "of the output. Returns ------- clearsky : DataFrame (if Series input) or OrderedDict", "precipitable_water # algorithm fails for pw < 0.2 if np.isscalar(w): w = 0.2", "best performance of models which require only zenith angle [3]. Extreme care should", "0.97 * w**0.032 i02 = 0.12 * w**0.56 i0p = i0 * (i02*aod700**2", "(if Series input) or OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni',", "except ImportError: raise ImportError('The Linke turbidity lookup table requires scipy. ' + 'You", "[1, 2]. A report on clear sky models found the Haurwitz model to", "correct the beam/GHI models, and the # second correction is used to correct", "sky radiation\" b = 0.664 + 0.163/fh1 bnci = b * np.exp(-0.09 *", "latitude, longitude, filepath=None, interp_turbidity=True): \"\"\" Look up the Linke Turibidity from the ``LinkeTurbidities.mat``", "1: taud = taud[0] return taud def _calc_d(w, aod700, p): \"\"\"Calculate the d", "_calc_d(w, aod700, p) # this prevents the creation of nans at night instead", "prevents the creation of nans at night instead of 0s # it's also", "151-157, 2002. [2] <NAME> et. 
al., \"A New Operational Model for Satellite-Derived Irradiances:", "to # have NaNs that we'll want to map to 0s in the", "td1 = 0.092*w - 8.86, 0.0554*w - 5.71 td0 = 0.0042*w + 3.12,", "eqn 9 and appendix A, and 3) Global horizontal model in # eqn.", "GHI from Haurwitz model. Implements the Haurwitz clear sky model for global horizontal", "tuples of coefficients for # aod700 < 0.05, aod700 >= 0.05 td4 =", "pp. 151-157, 2002. [2] <NAME> et. al., \"A New Operational Model for Satellite-Derived", "[2] <NAME>, \"Validation of models that estimate the clear sky global and beam", "of 0s # it's also friendly to scalar and series inputs sin_elev =", "Returns ------- pd.Series The modeled global horizonal irradiance in W/m^2 provided by the", "pd.DataFrame.from_dict(irrads) return irrads def lookup_linke_turbidity(time, latitude, longitude, filepath=None, interp_turbidity=True): \"\"\" Look up the", "cos_zenith) bnci_2 = ghi * np.fmin(np.fmax(bnci_2, 0), 1e20) dni = np.minimum(bnci, bnci_2) dhi", "Default values for monthly Linke turbidity provided by SoDa [4, 5]. Parameters -----------", "way to do this. if np.isscalar(w) and np.isscalar(aod700): w = np.array([w]) aod700 =", "# first correction is used to correct the beam/GHI models, and the #", "given month do the # following: LT = LinkeTurbidity(LatitudeIndex, LongitudeIndex, month). # Note", "[1]_. Reference [1]_ describes the accuracy of the model as being 15, 20,", "turbidity # factor in eqn 9 and appendix A, and 3) Global horizontal", "TL < 2 # TLcorr = TL; # TLcorr(TL < 2) = TLcorr(TL", "tb0 = 0.33 + 0.045*np.log(w) + 0.0096*np.log(w)**2 tbp = 0.0089*w + 0.13 taub", "to Cloud Type,\" Journal of Meteorology, vol. 3, pp. 123-124, 1946. 
[3] <NAME>,", "* altitude + 0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl - 1))) * np.exp(0.01*airmass_absolute**1.8))", "each # function i0p = _calc_i0p(dni_extra, w, aod700, p) taub = _calc_taub(w, aod700,", "< 0.05 aod700_mask = np.array([aod700_mask, ~aod700_mask], dtype=np.int) # create tuples of coefficients for", "or OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. References ----------", "OrderedDict() irrads['ghi'] = ghi irrads['dni'] = dni irrads['dhi'] = dhi if isinstance(dni, pd.Series):", "and DNI*cos(zenith) as presented in [1, 2]. A report on clear sky models", "Beam model in eqn. 8, 2) new turbidity # factor in eqn 9", "airmass independent formulation for the Linke turbidity coefficient\", Solar Energy, vol 73, pp.", "i0p def _calc_taub(w, aod700, p): \"\"\"Calculate the taub coefficient\"\"\" p0 = 101325. tb1", "dni_extra=1364.): ''' Determine clear sky GHI, DNI, and DHI from Ineichen/Perez model. Implements", "+ b0 return b def _calc_taug(w, aod700, p): \"\"\"Calculate the taug coefficient\"\"\" p0", "found the Haurwitz model to have the best performance of models which require", "0.2 else w else: w = w.copy() w[w < 0.2] = 0.2 #", "airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.): ''' Determine clear sky GHI, DNI, and DHI from", "aod700, p) g = _calc_g(w, aod700) taud = _calc_taud(w, aod700, p) d =", "# things) three things. 1) Beam model in eqn. 8, 2) new turbidity", "sun above the horizon (deg). aod700: numeric The aerosol optical depth at 700", "interp_turbidity=True): \"\"\" Look up the Linke Turibidity from the ``LinkeTurbidities.mat`` data file supplied", "tl to # reinsert tl nans ghi = cg1 * dni_extra * cos_zenith", "path to the ``.mat`` file. 
interp_turbidity : bool If ``True``, interpolates the monthly", "df_out def _linearly_scale(inputmatrix, inputmin, inputmax, outputmin, outputmax): \"\"\" used by linke turbidity lookup", "= 0.664 + 0.163/fh1 bnci = b * np.exp(-0.09 * airmass_absolute * (tl", "data set [3]. Default values for monthly Linke turbidity provided by SoDa [4,", "pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads def lookup_linke_turbidity(time, latitude, longitude, filepath=None, interp_turbidity=True): \"\"\"", "angle in degrees. Returns ------- pd.Series The modeled global horizonal irradiance in W/m^2", "Turbidity Information\", Proc. ISES Solar World Congress, June 2003. Goteborg, Sweden. ''' #", "a minimal input data set [3]. Default values for monthly Linke turbidity provided", "the calculations as needed in each # function i0p = _calc_i0p(dni_extra, w, aod700,", "[1] <NAME> and <NAME>, \"A New airmass independent formulation for the Linke turbidity", "outputmax - outputmin outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange + outputmin return outputmatrix def", "Satellite-Derived Irradiances: Description and Validation\", Solar Energy, vol 73, pp. 307-317, 2002. [3]", "the Ineichen/Perez model to have excellent performance with a minimal input data set", "``'dhi', 'dni', 'ghi'``. References ---------- .. [1] <NAME>, \"A broadband simplified version of", "beam clear sky radiation\" b = 0.664 + 0.163/fh1 bnci = b *", "x 12 # matrix of type uint8 called 'LinkeTurbidity'. The rows represent global", "broadband simplified version of the Solis clear sky model,\" Solar Energy, 82, 758-762", "332-344 (2016). \"\"\" p = pressure w = precipitable_water # algorithm fails for", "ineichen(apparent_zenith, airmass_absolute, linke_turbidity, altitude=0, dni_extra=1364.): ''' Determine clear sky GHI, DNI, and DHI", "the array so that the interpolation will work for # Jan 1 -", "irradiance (GHI) as presented in [1, 2]. 
A report on clear sky models", "return linke_turbidity def haurwitz(apparent_zenith): ''' Determine clear sky GHI from Haurwitz model. Implements", "Description and Validation\", Solar Energy, vol 73, pp. 307-317, 2002. [3] <NAME>, <NAME>,", "and the depth (third dimension) represents months of # the year from January", "Density,\" Journal of Meteorology, vol. 2, pp. 154-166, 1945. [2] <NAME>, \"Insolation in", "(deg). aod700: numeric The aerosol optical depth at 700 nm (unitless). Algorithm derived", "+ 0.1 taug = tg1*aod700 + tg0 + tgp*np.log(p/p0) return taug def _calc_g(w,", "2002. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear Sky Models: Implementation", "to correct the revised turibidity # factor. In my estimation, there is no", "clear sky models found the Ineichen/Perez model to have excellent performance with a", "np.log(w) + b0 return b def _calc_taug(w, aod700, p): \"\"\"Calculate the taug coefficient\"\"\"", "1945. [2] <NAME>, \"Insolation in Relation to Cloud Type,\" Journal of Meteorology, vol.", "aod700, p): \"\"\"Calculate the taud coefficient.\"\"\" # isscalar tests needed to ensure that", "number from the file by 20 to get the # turbidity. try: import", "Dec 31. # Then we map the month value to the day of", "2160)) .astype(np.int64)) longitude_index = ( np.around(_linearly_scale(longitude, -180, 180, 1, 4320)) .astype(np.int64)) g =", "- 13800, -0.21*w + 11.6 td3 = -3.11*w + 79.4, 0.27*w - 20.7", "scalar and series inputs sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation))) dni = i0p * np.exp(-taub/sin_elev**b)", "as the difference between GHI and DNI*cos(zenith) as presented in [1, 2]. A", "also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References ---------- [1] <NAME> and <NAME>, \"A New airmass", "0.1 taug = tg1*aod700 + tg0 + tgp*np.log(p/p0) return taug def _calc_g(w, aod700):", "lookup table requires scipy. 
' + 'You can still use clearsky.ineichen if you", "for the Linke turbidity coefficient\", Solar Energy, vol 73, pp. 151-157, 2002. [2]", "algorithm by <NAME>. References ---------- [1] <NAME>, \"Insolation in Relation to Cloudiness and", "p0 = 101325. taud = (tds[4]*aod700**4 + tds[3]*aod700**3 + tds[2]*aod700**2 + tds[1]*aod700 +", "to calculate clear sky GHI, DNI, and DHI. \"\"\" from __future__ import division", "0.071*np.log(p/p0)) return i0p def _calc_taub(w, aod700, p): \"\"\"Calculate the taub coefficient\"\"\" p0 =", "- inputmin outputrange = outputmax - outputmin outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange +", "pvlib_path = os.path.dirname(os.path.abspath(__file__)) filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath) linke_turbidity_table =", "0.0071*np.log(w)**2 tb0 = 0.33 + 0.045*np.log(w) + 0.0096*np.log(w)**2 tbp = 0.0089*w + 0.13", "Perez introduce (among other # things) three things. 1) Beam model in eqn.", "= dni irrads['dhi'] = dhi if isinstance(dni, pd.Series): irrads = pd.DataFrame.from_dict(irrads) return irrads", "* w**0.56 i0p = i0 * (i02*aod700**2 + i01*aod700 + io0 + 0.071*np.log(p/p0))", "output. 
However, we # want NaNs in other inputs to propagate through to", "mat = scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity'] latitude_index = ( np.around(_linearly_scale(latitude, 90, -90, 1,", "# np.minimum, and np.fmax # use max so that nighttime values will result", "we need to add previous Dec and next Jan # to the array", "d = _calc_d(w, aod700, p) # this prevents the creation of nans at", "position on the Earth's surface for a given month do the # following:", "b0 = -0.7565*aod700**2 + 0.5057*aod700 + 0.4557 b = b1 * np.log(w) +", "and np.fmax # use max so that nighttime values will result in 0s", "* airmass_absolute * (tl - 1)) bnci = dni_extra * np.fmax(bnci, 0) #", "equation from pg 311 because of the existence of known typos in #", "log(w) instead of repeating the calculations as needed in each # function i0p", "the sun above the horizon (deg). aod700: numeric The aerosol optical depth at", "correction is used to correct the beam/GHI models, and the # second correction", "(12). To determine the Linke # turbidity for a position on the Earth's", "return taub def _calc_b(w, aod700): \"\"\"Calculate the b coefficient.\"\"\" b1 = 0.00925*aod700**2 +", "0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl - 1))) * np.exp(0.01*airmass_absolute**1.8)) # use fmax", "# it's also friendly to scalar and series inputs sin_elev = np.maximum(1.e-30, np.sin(np.radians(apparent_elevation)))", "and Perez introduce (among other # things) three things. 1) Beam model in", "def haurwitz(apparent_zenith): ''' Determine clear sky GHI from Haurwitz model. Implements the Haurwitz", "0.0042*w + 3.12, 0.0057*w + 2.94 tdp = -0.83*(1+aod700)**(-17.2), -0.71*(1+aod700)**(-15.0) tds = (np.array([td0,", "= 3.92e-05 * altitude + 0.0387 ghi = (np.exp(-cg2*airmass_absolute*(fh1 + fh2*(tl - 1)))", "This is approximate and could be made more accurate. 
g2 = np.concatenate([[g[-1]], g,", "In my estimation, there is no need to correct the # turbidity factor", "return irrads def lookup_linke_turbidity(time, latitude, longitude, filepath=None, interp_turbidity=True): \"\"\" Look up the Linke", "made more accurate. g2 = np.concatenate([[g[-1]], g, [g[0]]]) days = np.linspace(-15, 380, num=14)", "If ``True``, interpolates the monthly Linke turbidity values found in ``LinkeTurbidities.mat`` to daily", "# This is approximate and could be made more accurate. g2 = np.concatenate([[g[-1]],", "g = _calc_g(w, aod700) taud = _calc_taud(w, aod700, p) d = _calc_d(w, aod700,", "to have excellent performance with a minimal input data set [3]. Default values", "# appendix A seems as if there are two separate corrections, the #", "pp. 123-124, 1946. [3] <NAME>, <NAME>, and <NAME>, \"Global Horizontal Irradiance Clear Sky", "the \"enhanced extraterrestrial irradiance\".\"\"\" p0 = 101325. io0 = 1.08 * w**0.0051 i01", "with pvlib. Parameters ---------- time : pandas.DatetimeIndex latitude : float longitude : float", "taud = _calc_taud(w, aod700, p) d = _calc_d(w, aod700, p) # this prevents", "Cloud Type,\" Journal of Meteorology, vol. 3, pp. 123-124, 1946. [3] <NAME>, <NAME>,", "from Ineichen/Perez model. Implements the Ineichen and Perez clear sky model for global", "as being 15, 20, and 18 W/m^2 for the beam, global, and diffuse", "<NAME>, \"Validation of models that estimate the clear sky global and beam solar", "_calc_taub(w, aod700, p) b = _calc_b(w, aod700) taug = _calc_taug(w, aod700, p) g", "fh1 = np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.) 
cg1 = 5.09e-05 * altitude + 0.868", "the model as being 15, 20, and 18 W/m^2 for the beam, global,", "of models that estimate the clear sky global and beam solar irradiance,\" Solar", "= mat['LinkeTurbidity'] latitude_index = ( np.around(_linearly_scale(latitude, 90, -90, 1, 2160)) .astype(np.int64)) longitude_index =", "DataFrame (if Series input) or OrderedDict of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi',", "outputmax): \"\"\" used by linke turbidity lookup function \"\"\" inputrange = inputmax -", "lookup_linke_turbidity pvlib.location.Location.get_clearsky References ---------- [1] <NAME> and <NAME>, \"A New airmass independent formulation", "W/m^2 for the beam, global, and diffuse components. Reference [2]_ provides comparisons with", "tb1 = 1.82 + 0.056*np.log(w) + 0.0071*np.log(w)**2 tb0 = 0.33 + 0.045*np.log(w) +", "np.isscalar(w): w = np.full_like(aod700, w) elif np.isscalar(aod700): aod700 = np.full_like(w, aod700) aod700_mask =", "Energy 73, pg 311. Full ref: Perez # et. al., Vol. 73, pp.", "= np.array([aod700]) elif np.isscalar(w): w = np.full_like(aod700, w) elif np.isscalar(aod700): aod700 = np.full_like(w,", "get the # turbidity. try: import scipy.io except ImportError: raise ImportError('The Linke turbidity", "and beam solar irradiance,\" Solar Energy, 132, 332-344 (2016). \"\"\" p = pressure", "'data', 'LinkeTurbidities.mat') mat = scipy.io.loadmat(filepath) linke_turbidity_table = mat['LinkeTurbidity'] latitude_index = ( np.around(_linearly_scale(latitude, 90,", "of arrays DataFrame/OrderedDict contains the columns/keys ``'dhi', 'dni', 'ghi'``. See also -------- lookup_linke_turbidity", "'dni', 'ghi'``. See also -------- lookup_linke_turbidity pvlib.location.Location.get_clearsky References ---------- [1] <NAME> and <NAME>,", "``LinkeTurbidities.mat`` to daily values. Returns ------- turbidity : Series \"\"\" # The .mat", "in Relation to Cloud Type,\" Journal of Meteorology, vol. 3, pp. 
123-124, 1946.", "- Jan 15 and Dec 16 - Dec 31. # Then we map", "w**0.032 i02 = 0.12 * w**0.56 i0p = i0 * (i02*aod700**2 + i01*aod700", "taug = tg1*aod700 + tg0 + tgp*np.log(p/p0) return taug def _calc_g(w, aod700): \"\"\"Calculate", "cos_zenith = np.maximum(tools.cosd(apparent_zenith), 0) tl = linke_turbidity fh1 = np.exp(-altitude/8000.) fh2 = np.exp(-altitude/1250.)", "= -0.0147*np.log(w) - 0.3079*aod700**2 + 0.2846*aod700 + 0.3798 return g def _calc_taud(w, aod700,", "tds[0] + tds[5]*np.log(p/p0)) # be polite about matching the output type to the", "nm (unitless). Algorithm derived for values between 0 and 0.45. precipitable_water: numeric The", "in W/m^2 provided by the Haurwitz clear-sky model. Initial implementation of this algorithm", "the Ineichen and Perez clear sky model for global horizontal irradiance (GHI), direct", "this prevents the creation of nans at night instead of 0s # it's", "numeric Refraction corrected solar zenith angle in degrees. airmass_absolute: numeric Pressure corrected airmass.", "GHI, DNI, and DHI from Ineichen/Perez model. Implements the Ineichen and Perez clear", "tg0 = 0.27 + 0.043*np.log(w) + 0.0090*np.log(w)**2 tgp = 0.0079*w + 0.1 taug", "DNI, and DHI. \"\"\" from __future__ import division import os from collections import", "tl / tl * np.fmax(ghi, 0) # BncI = \"normal beam clear sky", "(2008). .. [2] <NAME>, \"Validation of models that estimate the clear sky global", "to the input type(s) if len(taud) == 1: taud = taud[0] return taud", "# \"empirical correction\" SE 73, 157 & SE 73, 312. bnci_2 = ((1", "20. return linke_turbidity def haurwitz(apparent_zenith): ''' Determine clear sky GHI from Haurwitz model.", "longitude_index = ( np.around(_linearly_scale(longitude, -180, 180, 1, 4320)) .astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index] if", "precipitable water of the atmosphere (cm). 
Algorithm derived for values between 0.2 and", "= 0.33 + 0.045*np.log(w) + 0.0096*np.log(w)**2 tbp = 0.0089*w + 0.13 taub =", "= np.exp(-altitude/1250.) cg1 = 5.09e-05 * altitude + 0.868 cg2 = 3.92e-05 *", "December (12). To determine the Linke # turbidity for a position on the", "= outputmax - outputmin outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange + outputmin return outputmatrix", "outputmin outputmatrix = (inputmatrix-inputmin) * outputrange/inputrange + outputmin return outputmatrix def simplified_solis(apparent_elevation, aod700=0.1,", ": float longitude : float filepath : string The path to the ``.mat``", "11. They do NOT appear to use the new turbidity factor (item #", "output type to the input type(s) if len(taud) == 1: taud = taud[0]", "factor. In my estimation, there is no need to correct the # turbidity", "TLcorr(TL < 2) - 0.25 .* (2-TLcorr(TL < 2)) .^ (0.5); # This", "and appendix A, and 3) Global horizontal model in # eqn. 11. They", ".astype(np.int64)) longitude_index = ( np.around(_linearly_scale(longitude, -180, 180, 1, 4320)) .astype(np.int64)) g = linke_turbidity_table[latitude_index][longitude_index]", "from Haurwitz model. Implements the Haurwitz clear sky model for global horizontal irradiance", "0.12 * w**0.56 i0p = i0 * (i02*aod700**2 + i01*aod700 + io0 +", "aod700, p) # this prevents the creation of nans at night instead of", "the difference between GHI and DNI*cos(zenith) as presented in [1, 2]. A report", "atmosphere (cm). Algorithm derived for values between 0.2 and 10 cm. Values less", "my reading of the publication # on pages 151-157, Ineichen and Perez introduce", "0.163/fh1 bnci = b * np.exp(-0.09 * airmass_absolute * (tl - 1)) bnci", "to get the # turbidity. try: import scipy.io except ImportError: raise ImportError('The Linke", "being 15, 20, and 18 W/m^2 for the beam, global, and diffuse components.", "GHI, DNI, and DHI according to the simplified Solis model [1]_. Reference [1]_", "101325. 
taud = (tds[4]*aod700**4 + tds[3]*aod700**3 + tds[2]*aod700**2 + tds[1]*aod700 + tds[0] +" ]
[ "torch def get_state_dict(model): if type(model) == torch.nn.DataParallel: state_dict = model.module.state_dict() else: state_dict =", "if type(model) == torch.nn.DataParallel: state_dict = model.module.state_dict() else: state_dict = model.state_dict() return state_dict", "get_state_dict(model): if type(model) == torch.nn.DataParallel: state_dict = model.module.state_dict() else: state_dict = model.state_dict() return", "import torch def get_state_dict(model): if type(model) == torch.nn.DataParallel: state_dict = model.module.state_dict() else: state_dict", "def get_state_dict(model): if type(model) == torch.nn.DataParallel: state_dict = model.module.state_dict() else: state_dict = model.state_dict()" ]
[ "\"stackcollapse-merged.txt\" def merge(files, dst): data = defaultdict(lambda: 0) for file in files: with", "= defaultdict(lambda: 0) for file in files: with open(file, \"r\") as fp: for", "nargs=\"+\", help=\"a stackcollapse file\" ) parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write resulting stackcollapse to", "dst): data = defaultdict(lambda: 0) for file in files: with open(file, \"r\") as", "(default: {DEFAULT_OUT})\", ) opts = parser.parse_args(sys.argv[1:]) merge(opts.files, opts.out) if __name__ == \"__main__\": main()", "multiple stackcollapes into a single one\" ) parser.add_argument( \"files\", metavar=\"FILE\", type=str, nargs=\"+\", help=\"a", "hits = line.rsplit(\" \", 1) hits = int(hits) data[stack] += hits with open(dst,", "fp: for line in fp.readlines(): stack, hits = line.rsplit(\" \", 1) hits =", "= argparse.ArgumentParser( description=\"merge multiple stackcollapes into a single one\" ) parser.add_argument( \"files\", metavar=\"FILE\",", ") parser.add_argument( \"files\", metavar=\"FILE\", type=str, nargs=\"+\", help=\"a stackcollapse file\" ) parser.add_argument( \"-o\", \"--out\",", "type=str, nargs=\"+\", help=\"a stackcollapse file\" ) parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write resulting stackcollapse", "in files: with open(file, \"r\") as fp: for line in fp.readlines(): stack, hits", "\"r\") as fp: for line in fp.readlines(): stack, hits = line.rsplit(\" \", 1)", "default=DEFAULT_OUT, help=f\"write resulting stackcollapse to this file (default: {DEFAULT_OUT})\", ) opts = parser.parse_args(sys.argv[1:])", "1) hits = int(hits) data[stack] += hits with open(dst, \"w\") as fp: for", "with open(file, \"r\") as fp: for line in fp.readlines(): stack, hits = line.rsplit(\"", "in data.items(): print(stack, hits, file=fp) def main(): parser = argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser(", "line in fp.readlines(): stack, hits = line.rsplit(\" \", 1) hits = int(hits) 
data[stack]", "DEFAULT_OUT = \"stackcollapse-merged.txt\" def merge(files, dst): data = defaultdict(lambda: 0) for file in", "one\" ) parser.add_argument( \"files\", metavar=\"FILE\", type=str, nargs=\"+\", help=\"a stackcollapse file\" ) parser.add_argument( \"-o\",", "argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser( description=\"merge multiple stackcollapes into a single one\" ) parser.add_argument(", "hits in data.items(): print(stack, hits, file=fp) def main(): parser = argparse.ArgumentParser(sys.argv[0]) parser =", "file=fp) def main(): parser = argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser( description=\"merge multiple stackcollapes into", "\", 1) hits = int(hits) data[stack] += hits with open(dst, \"w\") as fp:", "#!/usr/bin/env python import argparse import sys from collections import defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\"", "this file (default: {DEFAULT_OUT})\", ) opts = parser.parse_args(sys.argv[1:]) merge(opts.files, opts.out) if __name__ ==", "open(file, \"r\") as fp: for line in fp.readlines(): stack, hits = line.rsplit(\" \",", "stackcollapse file\" ) parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write resulting stackcollapse to this file", "= int(hits) data[stack] += hits with open(dst, \"w\") as fp: for stack, hits", "\"w\") as fp: for stack, hits in data.items(): print(stack, hits, file=fp) def main():", "def merge(files, dst): data = defaultdict(lambda: 0) for file in files: with open(file,", "sys from collections import defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\" def merge(files, dst): data =", "fp: for stack, hits in data.items(): print(stack, hits, file=fp) def main(): parser =", "merge(files, dst): data = defaultdict(lambda: 0) for file in files: with open(file, \"r\")", "= line.rsplit(\" \", 1) hits = int(hits) data[stack] += hits with open(dst, \"w\")", "as fp: for stack, hits in data.items(): print(stack, hits, file=fp) def main(): parser", 
"single one\" ) parser.add_argument( \"files\", metavar=\"FILE\", type=str, nargs=\"+\", help=\"a stackcollapse file\" ) parser.add_argument(", "into a single one\" ) parser.add_argument( \"files\", metavar=\"FILE\", type=str, nargs=\"+\", help=\"a stackcollapse file\"", "stackcollapes into a single one\" ) parser.add_argument( \"files\", metavar=\"FILE\", type=str, nargs=\"+\", help=\"a stackcollapse", "resulting stackcollapse to this file (default: {DEFAULT_OUT})\", ) opts = parser.parse_args(sys.argv[1:]) merge(opts.files, opts.out)", "file (default: {DEFAULT_OUT})\", ) opts = parser.parse_args(sys.argv[1:]) merge(opts.files, opts.out) if __name__ == \"__main__\":", "main(): parser = argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser( description=\"merge multiple stackcollapes into a single", "with open(dst, \"w\") as fp: for stack, hits in data.items(): print(stack, hits, file=fp)", "python import argparse import sys from collections import defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\" def", "fp.readlines(): stack, hits = line.rsplit(\" \", 1) hits = int(hits) data[stack] += hits", "int(hits) data[stack] += hits with open(dst, \"w\") as fp: for stack, hits in", "to this file (default: {DEFAULT_OUT})\", ) opts = parser.parse_args(sys.argv[1:]) merge(opts.files, opts.out) if __name__", "a single one\" ) parser.add_argument( \"files\", metavar=\"FILE\", type=str, nargs=\"+\", help=\"a stackcollapse file\" )", "help=f\"write resulting stackcollapse to this file (default: {DEFAULT_OUT})\", ) opts = parser.parse_args(sys.argv[1:]) merge(opts.files,", "defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\" def merge(files, dst): data = defaultdict(lambda: 0) for file", "hits, file=fp) def main(): parser = argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser( description=\"merge multiple stackcollapes", "file\" ) parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write resulting stackcollapse to this file 
(default:", "data.items(): print(stack, hits, file=fp) def main(): parser = argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser( description=\"merge", "for line in fp.readlines(): stack, hits = line.rsplit(\" \", 1) hits = int(hits)", "for stack, hits in data.items(): print(stack, hits, file=fp) def main(): parser = argparse.ArgumentParser(sys.argv[0])", "parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write resulting stackcollapse to this file (default: {DEFAULT_OUT})\", )", "parser = argparse.ArgumentParser( description=\"merge multiple stackcollapes into a single one\" ) parser.add_argument( \"files\",", "description=\"merge multiple stackcollapes into a single one\" ) parser.add_argument( \"files\", metavar=\"FILE\", type=str, nargs=\"+\",", "import sys from collections import defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\" def merge(files, dst): data", "as fp: for line in fp.readlines(): stack, hits = line.rsplit(\" \", 1) hits", "defaultdict(lambda: 0) for file in files: with open(file, \"r\") as fp: for line", "for file in files: with open(file, \"r\") as fp: for line in fp.readlines():", "stack, hits in data.items(): print(stack, hits, file=fp) def main(): parser = argparse.ArgumentParser(sys.argv[0]) parser", "= argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser( description=\"merge multiple stackcollapes into a single one\" )", "line.rsplit(\" \", 1) hits = int(hits) data[stack] += hits with open(dst, \"w\") as", "argparse.ArgumentParser( description=\"merge multiple stackcollapes into a single one\" ) parser.add_argument( \"files\", metavar=\"FILE\", type=str,", ") parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write resulting stackcollapse to this file (default: {DEFAULT_OUT})\",", "data[stack] += hits with open(dst, \"w\") as fp: for stack, hits in data.items():", "def main(): parser = argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser( 
description=\"merge multiple stackcollapes into a", "in fp.readlines(): stack, hits = line.rsplit(\" \", 1) hits = int(hits) data[stack] +=", "\"files\", metavar=\"FILE\", type=str, nargs=\"+\", help=\"a stackcollapse file\" ) parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write", "parser.add_argument( \"files\", metavar=\"FILE\", type=str, nargs=\"+\", help=\"a stackcollapse file\" ) parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT,", "0) for file in files: with open(file, \"r\") as fp: for line in", "+= hits with open(dst, \"w\") as fp: for stack, hits in data.items(): print(stack,", "\"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write resulting stackcollapse to this file (default: {DEFAULT_OUT})\", ) opts", "from collections import defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\" def merge(files, dst): data = defaultdict(lambda:", "import defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\" def merge(files, dst): data = defaultdict(lambda: 0) for", "files: with open(file, \"r\") as fp: for line in fp.readlines(): stack, hits =", "import argparse import sys from collections import defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\" def merge(files,", "open(dst, \"w\") as fp: for stack, hits in data.items(): print(stack, hits, file=fp) def", "hits with open(dst, \"w\") as fp: for stack, hits in data.items(): print(stack, hits,", "collections import defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\" def merge(files, dst): data = defaultdict(lambda: 0)", "stackcollapse to this file (default: {DEFAULT_OUT})\", ) opts = parser.parse_args(sys.argv[1:]) merge(opts.files, opts.out) if", "\"--out\", default=DEFAULT_OUT, help=f\"write resulting stackcollapse to this file (default: {DEFAULT_OUT})\", ) opts =", "data = defaultdict(lambda: 0) for file in files: with open(file, \"r\") as fp:", "parser = argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser( description=\"merge multiple stackcollapes into a 
single one\"", "file in files: with open(file, \"r\") as fp: for line in fp.readlines(): stack,", "stack, hits = line.rsplit(\" \", 1) hits = int(hits) data[stack] += hits with", "argparse import sys from collections import defaultdict DEFAULT_OUT = \"stackcollapse-merged.txt\" def merge(files, dst):", "print(stack, hits, file=fp) def main(): parser = argparse.ArgumentParser(sys.argv[0]) parser = argparse.ArgumentParser( description=\"merge multiple", "metavar=\"FILE\", type=str, nargs=\"+\", help=\"a stackcollapse file\" ) parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write resulting", "hits = int(hits) data[stack] += hits with open(dst, \"w\") as fp: for stack,", "= \"stackcollapse-merged.txt\" def merge(files, dst): data = defaultdict(lambda: 0) for file in files:", "help=\"a stackcollapse file\" ) parser.add_argument( \"-o\", \"--out\", default=DEFAULT_OUT, help=f\"write resulting stackcollapse to this" ]
[ "def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id ==", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = EducationModel(**request.dict(), user_id=user.id) session.add(edu)", "tags=[\"disability\"]) def edit_user_language( id: int, request: DisabilitySchema, response: Response, user: User = Depends(fastapi_users.current_user()),", "session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language( id: int, request: LanguageSchema, response: Response,", "await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user: User = Depends(fastapi_users.current_user()), session: Session =", "request.end_date experience.description = request.description session.commit() session.refresh(experience) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"]) def", "return [ ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date, end_date=exp.end_date, description=exp.description, ) for exp", "lang.language = request.language session.commit() session.refresh(lang) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language(", "request: EducationSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ):", "== user.id) .filter(DisabilityModel.id == id) .one_or_none() ) if dis: dis.level = request.level dis.type", "= request.name education.city = request.city education.start_date = request.start_date education.end_date = request.end_date session.commit() session.refresh(education)", "def edit_user_education( 
id: int, request: EducationSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session:", "education.end_date = request.end_date session.commit() session.refresh(education) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education(", "= DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def edit_user_language( id: int, request:", "deleted = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .delete() ) if", "int, request: ExperienceSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "== id) .one_or_none() ) if education: education.edu_type = request.edu_type education.name = request.name education.city", "import db_session from .models.disability import Disability as DisabilityModel from .models.education import Education as", "session: Session = Depends(db_session), ): edu = DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\",", "ExperienceModel from .models.language import Language as LanguageModel from .schemas.disability import Disability as DisabilitySchema", ".filter(EducationModel.id == id) .one_or_none() ) if education: education.edu_type = request.edu_type education.name = request.name", "] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def add_user_experience( request: ExperienceSchema, user: User = Depends(fastapi_users.current_user()), session:", "dis.level = request.level dis.type = request.type session.commit() session.refresh(dis) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\",", "education.city = request.city education.start_date = 
request.start_date education.end_date = request.end_date session.commit() session.refresh(education) return response.status_code", "( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .one_or_none() ) if experience: experience.position", "user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education( id: int, request: EducationSchema, response:", "prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\") async def startup():", "request.type session.commit() session.refresh(dis) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def remove_user_language( id: int,", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu)", "user.id) .filter(DisabilityModel.id == id) .delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return", "as ExperienceSchema from .schemas.experience import ExperienceDB from .schemas.language import Language as LanguageSchema from", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return", "tokenUrl=\"/auth/jwt/login\") app = FastAPI() fastapi_users = FastAPIUsers( user_db, [jwt_authentication], User, UserCreate, UserUpdate, UserDB,", "os from fastapi import Depends, FastAPI, Response, status from fastapi.middleware.cors import CORSMiddleware from", "response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language( id: int, response: Response, user: User", 
"language=lang.language, level=lang.level.value) for lang in languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: LanguageSchema,", "response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB]) def get_user_education(user: User = Depends(fastapi_users.current_user()),", "tags=[\"education\"], status_code=status.HTTP_201_CREATED) def add_user_education( request: EducationSchema, user: User = Depends(fastapi_users.current_user()), session: Session =", "if education: education.edu_type = request.edu_type education.name = request.name education.city = request.city education.start_date =", "response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education( id: int, response: Response, user: User", "Depends(db_session), ): deleted = ( session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id == id).delete() ) if not", "if dis: dis.level = request.level dis.type = request.type session.commit() session.refresh(dis) return response.status_code =", "for dis in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: DisabilitySchema, user: User", "@app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB]) def get_user_education(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): educations", "city=exp.city, start_date=exp.start_date, end_date=exp.end_date, description=exp.description, ) for exp in experiences ] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED)", "request.description session.commit() session.refresh(experience) return response.status_code = status.HTTP_404_NOT_FOUND 
@app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience( id: int,", "status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session", "Session = Depends(db_session), ): edu = EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"])", "tags=[\"language\"]) def remove_user_language( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session", "@app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): experiences", "Session = Depends(db_session)): disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id ==", "app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\") async def", "prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\")", "LanguageSchema from .schemas.language import LanguageDB from .schemas.user import User, UserCreate, UserDB, UserUpdate APP_SECRET", "import 
FastAPIUsers from fastapi_users.authentication import JWTAuthentication from sqlalchemy.orm import Session from .database.session import", "def add_user_language( request: DisabilitySchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ):", "int, request: DisabilitySchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "from .models.disability import Disability as DisabilityModel from .models.education import Education as EducationModel from", ".filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .one_or_none() ) if dis: dis.level = request.level", "= FastAPI() fastapi_users = FastAPIUsers( user_db, [jwt_authentication], User, UserCreate, UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication),", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): education = ( session.query(EducationModel) .filter(EducationModel.user_id ==", "description=exp.description, ) for exp in experiences ] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def add_user_experience( request:", ") if experience: experience.position = request.position experience.employer = request.employer experience.city = request.city experience.start_date", "edu = LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language( id: int,", "get_user_experience(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all()", "): edu = EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education( id:", "experiences = 
session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return [ ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date,", "dis in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: DisabilitySchema, user: User =", "education.start_date = request.start_date education.end_date = request.end_date session.commit() session.refresh(education) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\",", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): lang = ( session.query(LanguageModel) .filter(LanguageModel.user_id ==", "disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for dis in disabilities]", "[DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for dis in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language( request:", "Session = Depends(db_session), ): experience = ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"])", "import Education as EducationSchema from .schemas.education import EducationDB from .schemas.experience import Experience as", "ExperienceSchema from .schemas.experience import ExperienceDB from .schemas.language import Language as LanguageSchema from .schemas.language", "= status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education( id: int, response: Response, user: User =", "user.id) .filter(DisabilityModel.id == id) .one_or_none() ) if dis: dis.level = request.level dis.type =", "@app.delete(\"/user/disability\", tags=[\"disability\"]) def 
remove_user_language( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session:", ") for exp in experiences ] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def add_user_experience( request: ExperienceSchema,", "for edu in educations ] @app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED) def add_user_education( request: EducationSchema, user:", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu)", "Language as LanguageSchema from .schemas.language import LanguageDB from .schemas.user import User, UserCreate, UserDB,", "session.refresh(experience) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience( id: int, response: Response,", "id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ):", "education = ( session.query(EducationModel) .filter(EducationModel.user_id == user.id) .filter(EducationModel.id == id) .one_or_none() ) if", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(LanguageModel).filter(LanguageModel.user_id", "response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()),", ".filter(ExperienceModel.id == id) .one_or_none() ) if experience: experience.position = request.position experience.employer = request.employer", "user_id=user.id) session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience( id: int, request: 
ExperienceSchema, response:", "User, UserCreate, UserDB, UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app", "request.city education.start_date = request.start_date education.end_date = request.end_date session.commit() session.refresh(education) return response.status_code = status.HTTP_404_NOT_FOUND", "user.id).all() return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for dis in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def", ") if education: education.edu_type = request.edu_type education.name = request.name education.city = request.city education.start_date", "= status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience( id: int, response: Response, user: User =", "): dis = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .one_or_none() )", "[jwt_authentication], User, UserCreate, UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET),", "Depends(db_session), ): education = ( session.query(EducationModel) .filter(EducationModel.user_id == user.id) .filter(EducationModel.id == id) .one_or_none()", "id) .one_or_none() ) if dis: dis.level = request.level dis.type = request.type session.commit() session.refresh(dis)", "session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)):", "tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), 
prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\") async", "import Disability as DisabilitySchema from .schemas.disability import DisabilityDB from .schemas.education import Education as", "def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id ==", "EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education( id: int, request: EducationSchema,", ") if lang: lang.level = request.level lang.language = request.language session.commit() session.refresh(lang) return response.status_code", "fastapi_users.authentication import JWTAuthentication from sqlalchemy.orm import Session from .database.session import database, user_db from", "EducationSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = EducationModel(**request.dict(),", "edit_user_language( id: int, request: DisabilitySchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session", "LanguageSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): lang", "import Education as EducationModel from .models.experience import Experience as ExperienceModel from .models.language import", "add_user_language( request: DisabilitySchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu", "deleted = ( session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id == id).delete() ) if not deleted: response.status_code", "edu in educations ] @app.post(\"/user/education\", tags=[\"education\"], 
status_code=status.HTTP_201_CREATED) def add_user_education( request: EducationSchema, user: User", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(EducationModel).filter(EducationModel.user_id", "Session = Depends(db_session), ): education = ( session.query(EducationModel) .filter(EducationModel.user_id == user.id) .filter(EducationModel.id ==", "allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\") async def startup(): await database.connect() @app.on_event(\"shutdown\") async def", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return [", "= Depends(db_session), ): deleted = ( session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id == id).delete() ) if", "session: Session = Depends(db_session), ): edu = LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\",", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() return [DisabilityDB(id=dis.id, type=dis.type.value,", "Depends(db_session)): disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for dis in", "return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def remove_user_language( id: int, response: Response, user:", "user.id).all() return [ EducationDB( id=edu.id, edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date, ) for edu", "session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() 
return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for dis in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"],", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): education = ( session.query(EducationModel) .filter(EducationModel.user_id", "UserCreate, UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"])", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id ==", "Depends(db_session), ): deleted = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .delete()", "experience.description = request.description session.commit() session.refresh(experience) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience(", "not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user: User", "employer=exp.employer, city=exp.city, start_date=exp.start_date, end_date=exp.end_date, description=exp.description, ) for exp in experiences ] @app.post(\"/user/experience\", tags=[\"experience\"],", "edu = EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education( id: int,", "== user.id).all() return [ ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date, 
end_date=exp.end_date, description=exp.description, )", "request.level lang.language = request.language session.commit() session.refresh(lang) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = ExperienceModel(**request.dict(), user_id=user.id)", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = LanguageModel(**request.dict(), user_id=user.id) session.add(edu)", "session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience( id: int, request: ExperienceSchema, response: Response,", "request.position experience.employer = request.employer experience.city = request.city experience.start_date = request.start_date experience.end_date = request.end_date", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit()", "EducationSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): education", "= Depends(db_session), ): edu = LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def", "= request.level lang.language = request.language session.commit() session.refresh(lang) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"])", "Session = Depends(db_session), ): experience = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id ==", "fastapi import Depends, FastAPI, Response, status from fastapi.middleware.cors import CORSMiddleware from fastapi_users import", "LanguageModel from 
.schemas.disability import Disability as DisabilitySchema from .schemas.disability import DisabilityDB from .schemas.education", "experience: experience.position = request.position experience.employer = request.employer experience.city = request.city experience.start_date = request.start_date", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = EducationModel(**request.dict(), user_id=user.id)", ".schemas.disability import DisabilityDB from .schemas.education import Education as EducationSchema from .schemas.education import EducationDB", "start_date=edu.start_date, end_date=edu.end_date, ) for edu in educations ] @app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED) def add_user_education(", "for lang in languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: LanguageSchema, user: User", "status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def remove_user_language( id: int, response: Response, user: User = Depends(fastapi_users.current_user()),", "id) .one_or_none() ) if lang: lang.level = request.level lang.language = request.language session.commit() session.refresh(lang)", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): lang = ( session.query(LanguageModel) .filter(LanguageModel.user_id", "app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"]) app.add_middleware(", "@app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education( id: int, request: EducationSchema, response: Response, user: 
User =", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): lang = ( session.query(LanguageModel)", "= request.type session.commit() session.refresh(dis) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def remove_user_language( id:", "session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience( id: int, request: ExperienceSchema, response: Response, user: User", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): dis = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id ==", "= session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return [ ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date, end_date=exp.end_date,", "remove_user_education( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "from fastapi_users.authentication import JWTAuthentication from sqlalchemy.orm import Session from .database.session import database, user_db", "EducationDB from .schemas.experience import Experience as ExperienceSchema from .schemas.experience import ExperienceDB from .schemas.language", "@app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): languages", "languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: LanguageSchema, user: User = Depends(fastapi_users.current_user()), session:", "request.edu_type education.name = request.name education.city = request.city education.start_date = request.start_date education.end_date = request.end_date", "deleted = ( 
session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id == id).delete() ) if not deleted: response.status_code", "Session = Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return [ EducationDB( id=edu.id, edu_type=edu.edu_type.value,", "from .schemas.disability import Disability as DisabilitySchema from .schemas.disability import DisabilityDB from .schemas.education import", "lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app = FastAPI() fastapi_users = FastAPIUsers( user_db, [jwt_authentication], User, UserCreate, UserUpdate,", "from fastapi import Depends, FastAPI, Response, status from fastapi.middleware.cors import CORSMiddleware from fastapi_users", "= request.end_date experience.description = request.description session.commit() session.refresh(experience) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"])", ".one_or_none() ) if dis: dis.level = request.level dis.type = request.type session.commit() session.refresh(dis) return", "ExperienceDB from .schemas.language import Language as LanguageSchema from .schemas.language import LanguageDB from .schemas.user", "user_db from .deps import db_session from .models.disability import Disability as DisabilityModel from .models.education", "def shutdown(): await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user: User = Depends(fastapi_users.current_user()), session:", "= status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language( id: int, response: Response, user: User =", "deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB]) def get_user_education(user: User =", "tags=[\"language\"], 
status_code=status.HTTP_201_CREATED) def add_user_language( request: LanguageSchema, user: User = Depends(fastapi_users.current_user()), session: Session =", "= request.description session.commit() session.refresh(experience) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience( id:", "= ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .delete() ) if not", "not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB]) def get_user_education(user: User", "status_code=status.HTTP_201_CREATED) def add_user_language( request: LanguageSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "= session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for lang in languages] @app.post(\"/user/language\",", "from .schemas.disability import DisabilityDB from .schemas.education import Education as EducationSchema from .schemas.education import", "if experience: experience.position = request.position experience.employer = request.employer experience.city = request.city experience.start_date =", "remove_user_experience( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education( id: int, response: Response, user: User = Depends(fastapi_users.current_user()),", "Session = Depends(db_session), ): deleted = ( session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id == id).delete() )", "user.id) .filter(LanguageModel.id == id) .one_or_none() ) if lang: lang.level = request.level 
lang.language =", "): deleted = ( session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id == id).delete() ) if not deleted:", "id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB])", "return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience( id: int, response: Response, user:", "= Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return [ EducationDB( id=edu.id, edu_type=edu.edu_type.value, name=edu.name,", "= ( session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id == id).delete() ) if not deleted: response.status_code =", "shutdown(): await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user: User = Depends(fastapi_users.current_user()), session: Session", "DisabilityDB from .schemas.education import Education as EducationSchema from .schemas.education import EducationDB from .schemas.experience", "( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .one_or_none() ) if dis: dis.level", "request: LanguageSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu =", "ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience( id: int, request: ExperienceSchema,", "status_code=status.HTTP_201_CREATED) def add_user_language( request: DisabilitySchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "= Depends(db_session), ): education = ( 
session.query(EducationModel) .filter(EducationModel.user_id == user.id) .filter(EducationModel.id == id)", "id) .one_or_none() ) if experience: experience.position = request.position experience.employer = request.employer experience.city =", "Depends(db_session), ): deleted = ( session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id == id).delete() ) if not", "== id) .one_or_none() ) if lang: lang.level = request.level lang.language = request.language session.commit()", "Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): education = (", "import database, user_db from .deps import db_session from .models.disability import Disability as DisabilityModel", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id)", "ExperienceSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience", "@app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: DisabilitySchema, user: User = Depends(fastapi_users.current_user()), session: Session", "Session = Depends(db_session), ): deleted = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id ==", "ExperienceSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = ExperienceModel(**request.dict(),", "from fastapi.middleware.cors import CORSMiddleware from fastapi_users import FastAPIUsers from fastapi_users.authentication import JWTAuthentication from", ".schemas.education import EducationDB from .schemas.experience import Experience as ExperienceSchema from .schemas.experience import ExperienceDB", "= request.city education.start_date = request.start_date education.end_date = 
request.end_date session.commit() session.refresh(education) return response.status_code =", "user.id).all() return [ ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date, end_date=exp.end_date, description=exp.description, ) for", "app = FastAPI() fastapi_users = FastAPIUsers( user_db, [jwt_authentication], User, UserCreate, UserUpdate, UserDB, )", "session: Session = Depends(db_session), ): experience = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id", "request: LanguageSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ):", "session: Session = Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value)", "from .schemas.education import EducationDB from .schemas.experience import Experience as ExperienceSchema from .schemas.experience import", "end_date=edu.end_date, ) for edu in educations ] @app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED) def add_user_education( request:", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return", "= Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return [ ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer,", "id: int, request: DisabilitySchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session =", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): dis = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id)", "Depends, FastAPI, Response, status from fastapi.middleware.cors 
import CORSMiddleware from fastapi_users import FastAPIUsers from", "== user.id) .filter(LanguageModel.id == id) .one_or_none() ) if lang: lang.level = request.level lang.language", ".schemas.language import Language as LanguageSchema from .schemas.language import LanguageDB from .schemas.user import User,", "from .models.experience import Experience as ExperienceModel from .models.language import Language as LanguageModel from", "deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user: User =", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(LanguageModel).filter(LanguageModel.user_id ==", "response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): dis =", "@app.on_event(\"startup\") async def startup(): await database.connect() @app.on_event(\"shutdown\") async def shutdown(): await database.disconnect() @app.get(\"/user/experience\",", "tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def add_user_experience( request: ExperienceSchema, user: User = Depends(fastapi_users.current_user()), session: Session =", "= status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session:", "= session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for dis in disabilities] @app.post(\"/user/disability\",", "[ ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date, end_date=exp.end_date, description=exp.description, ) for exp in", "tags=[\"education\"], response_model=list[EducationDB]) def 
get_user_education(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): educations =", "as EducationSchema from .schemas.education import EducationDB from .schemas.experience import Experience as ExperienceSchema from", ".schemas.experience import ExperienceDB from .schemas.language import Language as LanguageSchema from .schemas.language import LanguageDB", ".schemas.disability import Disability as DisabilitySchema from .schemas.disability import DisabilityDB from .schemas.education import Education", "start_date=exp.start_date, end_date=exp.end_date, description=exp.description, ) for exp in experiences ] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def", "user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def edit_user_language( id: int, request: DisabilitySchema, response:", "return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education( id: int, response: Response, user:", "in languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: LanguageSchema, user: User = Depends(fastapi_users.current_user()),", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id", "EducationDB( id=edu.id, edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date, ) for edu in educations ]", "session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education( id: int, request: EducationSchema, response: Response, user:", "= Depends(db_session), ): experience = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id)", 
"== user.id) .filter(ExperienceModel.id == id) .delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND", "( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .delete() ) if not deleted:", "def add_user_language( request: LanguageSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ):", "session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id == id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return", "== user.id).all() return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for lang in languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED)", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return [", "as DisabilityModel from .models.education import Education as EducationModel from .models.experience import Experience as", "Depends(db_session), ): edu = EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education(", "( session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id == id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND", "edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date, ) for edu in educations ] @app.post(\"/user/education\", tags=[\"education\"],", "session.query(LanguageModel) .filter(LanguageModel.user_id == user.id) .filter(LanguageModel.id == id) .one_or_none() ) if lang: lang.level =", "session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .one_or_none() ) if dis: 
dis.level =", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id ==", "edit_user_language( id: int, request: LanguageSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session", "tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): disabilities =", "user.id).all() return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for lang in languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def", "request.level dis.type = request.type session.commit() session.refresh(dis) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def", "allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\") async def startup(): await database.connect() @app.on_event(\"shutdown\") async", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id ==", "= ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .delete() ) if not", "session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for lang in languages] @app.post(\"/user/language\", tags=[\"language\"],", "Education as EducationModel from .models.experience import Experience as ExperienceModel from .models.language import Language", "import JWTAuthentication from sqlalchemy.orm import Session from .database.session import database, user_db from .deps", "Language as LanguageModel from .schemas.disability import Disability as 
DisabilitySchema from .schemas.disability import DisabilityDB", "def edit_user_experience( id: int, request: ExperienceSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session:", "tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): experiences =", "user.id).filter(EducationModel.id == id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\",", "= Depends(db_session), ): lang = ( session.query(LanguageModel) .filter(LanguageModel.user_id == user.id) .filter(LanguageModel.id == id)", "def startup(): await database.connect() @app.on_event(\"shutdown\") async def shutdown(): await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB])", "session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .delete() ) if not deleted: response.status_code", "@app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: LanguageSchema, user: User = Depends(fastapi_users.current_user()), session: Session", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(EducationModel).filter(EducationModel.user_id ==", "request: EducationSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu =", "if lang: lang.level = request.level lang.language = request.language session.commit() session.refresh(lang) return response.status_code =", ") if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def", "User = Depends(fastapi_users.current_user()), session: Session = 
Depends(db_session), ): deleted = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id", "status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session", "from .schemas.experience import ExperienceDB from .schemas.language import Language as LanguageSchema from .schemas.language import", "Education as EducationSchema from .schemas.education import EducationDB from .schemas.experience import Experience as ExperienceSchema", "import Language as LanguageSchema from .schemas.language import LanguageDB from .schemas.user import User, UserCreate,", "session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return [ EducationDB( id=edu.id, edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date, )", "import Depends, FastAPI, Response, status from fastapi.middleware.cors import CORSMiddleware from fastapi_users import FastAPIUsers", "== id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\", tags=[\"language\"],", "LanguageSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = LanguageModel(**request.dict(),", "response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()),", "Depends(db_session), ): edu = DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def edit_user_language(", ".one_or_none() ) if lang: lang.level = request.level lang.language = request.language session.commit() session.refresh(lang) return", 
"level=lang.level.value) for lang in languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: LanguageSchema, user:", "== user.id) .filter(ExperienceModel.id == id) .one_or_none() ) if experience: experience.position = request.position experience.employer", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): education = ( session.query(EducationModel)", "languages = session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for lang in languages]", "experiences ] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def add_user_experience( request: ExperienceSchema, user: User = Depends(fastapi_users.current_user()),", "lang = ( session.query(LanguageModel) .filter(LanguageModel.user_id == user.id) .filter(LanguageModel.id == id) .one_or_none() ) if", "if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user:", ".schemas.language import LanguageDB from .schemas.user import User, UserCreate, UserDB, UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\")", "session.commit() @app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB]) def get_user_education(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)):", "@app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language( id: int, request: LanguageSchema, response: Response, user: User =", "import os from fastapi import Depends, FastAPI, Response, status from fastapi.middleware.cors import CORSMiddleware", "from fastapi_users import FastAPIUsers from fastapi_users.authentication import JWTAuthentication from sqlalchemy.orm import Session from", "): experience 
= ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience( id:", "tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"]", "experience.employer = request.employer experience.city = request.city experience.start_date = request.start_date experience.end_date = request.end_date experience.description", "== id) .one_or_none() ) if experience: experience.position = request.position experience.employer = request.employer experience.city", "Session = Depends(db_session), ): deleted = ( session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id == id).delete() )", "= request.employer experience.city = request.city experience.start_date = request.start_date experience.end_date = request.end_date experience.description =", ".models.language import Language as LanguageModel from .schemas.disability import Disability as DisabilitySchema from .schemas.disability", "experience.position = request.position experience.employer = request.employer experience.city = request.city experience.start_date = request.start_date experience.end_date", "response_model=list[EducationDB]) def get_user_education(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id", "as ExperienceModel from .models.language import Language as LanguageModel from .schemas.disability import Disability as", "tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) 
app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"])", "= status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def remove_user_language( id: int, response: Response, user: User =", "educations = session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return [ EducationDB( id=edu.id, edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date,", "session.commit() session.refresh(lang) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language( id: int, response:", "session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language( id: int, request: LanguageSchema, response: Response, user:", "int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted", "os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app = FastAPI() fastapi_users = FastAPIUsers( user_db,", "session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .one_or_none() ) if experience: experience.position =", "import Language as LanguageModel from .schemas.disability import Disability as DisabilitySchema from .schemas.disability import", "tags=[\"education\"]) def remove_user_education( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session", "from .database.session import database, user_db from .deps import db_session from .models.disability import Disability", "Depends(db_session), ): experience = 
ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience(", "= request.edu_type education.name = request.name education.city = request.city education.start_date = request.start_date education.end_date =", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id)", "EducationSchema from .schemas.education import EducationDB from .schemas.experience import Experience as ExperienceSchema from .schemas.experience", ".filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .one_or_none() ) if experience: experience.position = request.position", "fastapi.middleware.cors import CORSMiddleware from fastapi_users import FastAPIUsers from fastapi_users.authentication import JWTAuthentication from sqlalchemy.orm", "dis = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .one_or_none() ) if", "@app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience( id: int, request: ExperienceSchema, response: Response, user: User =", "prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\",", "async def startup(): await database.connect() @app.on_event(\"shutdown\") async def shutdown(): await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"],", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): disabilities = 
session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() return [DisabilityDB(id=dis.id,", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return [ EducationDB(", "DisabilitySchema from .schemas.disability import DisabilityDB from .schemas.education import Education as EducationSchema from .schemas.education", "request.employer experience.city = request.city experience.start_date = request.start_date experience.end_date = request.end_date experience.description = request.description", "return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for lang in languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def add_user_language(", "import User, UserCreate, UserDB, UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\")", "== user.id).filter(LanguageModel.id == id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit()", "from .deps import db_session from .models.disability import Disability as DisabilityModel from .models.education import", "= os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app = FastAPI() fastapi_users = FastAPIUsers(", "DisabilityModel from .models.education import Education as EducationModel from .models.experience import Experience as ExperienceModel", "session: Session = Depends(db_session), ): deleted = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id", "session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def edit_user_language( id: int, request: DisabilitySchema, response: Response, user:", 
"session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .delete() ) if not deleted: response.status_code", "user.id) .filter(ExperienceModel.id == id) .one_or_none() ) if experience: experience.position = request.position experience.employer =", "id) .delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\", tags=[\"education\"],", "status_code=status.HTTP_201_CREATED) def add_user_education( request: EducationSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date, ) for edu in educations ] @app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED)", "= LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language( id: int, request:", "id: int, request: ExperienceSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session =", "type=dis.type.value, level=dis.level.value) for dis in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: DisabilitySchema,", "Disability as DisabilityModel from .models.education import Education as EducationModel from .models.experience import Experience", "session: Session = Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return [ ExperienceDB( id=exp.id,", "session.query(EducationModel) .filter(EducationModel.user_id == user.id) .filter(EducationModel.id == id) .one_or_none() ) if education: education.edu_type =", "request: ExperienceSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience =", "= 
JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app = FastAPI() fastapi_users = FastAPIUsers( user_db, [jwt_authentication], User,", "def edit_user_language( id: int, request: DisabilitySchema, response: Response, user: User = Depends(fastapi_users.current_user()), session:", "( session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id == id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND", "deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user: User =", "response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience =", "request.end_date session.commit() session.refresh(education) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education( id: int,", "CORSMiddleware from fastapi_users import FastAPIUsers from fastapi_users.authentication import JWTAuthentication from sqlalchemy.orm import Session", "tags=[\"experience\"]) def edit_user_experience( id: int, request: ExperienceSchema, response: Response, user: User = Depends(fastapi_users.current_user()),", "= request.level dis.type = request.type session.commit() session.refresh(dis) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"])", "): experience = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .one_or_none() )", "experience.start_date = request.start_date experience.end_date = request.end_date experience.description = request.description session.commit() session.refresh(experience) return response.status_code", "@app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) 
def add_user_experience( request: ExperienceSchema, user: User = Depends(fastapi_users.current_user()), session: Session", ".filter(ExperienceModel.id == id) .delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit()", "allow_headers=[\"*\"] ) @app.on_event(\"startup\") async def startup(): await database.connect() @app.on_event(\"shutdown\") async def shutdown(): await", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = DisabilityModel(**request.dict(), user_id=user.id) session.add(edu)", "): education = ( session.query(EducationModel) .filter(EducationModel.user_id == user.id) .filter(EducationModel.id == id) .one_or_none() )", "session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language( id: int, request: LanguageSchema, response: Response, user: User", "Experience as ExperienceSchema from .schemas.experience import ExperienceDB from .schemas.language import Language as LanguageSchema", "== id) .delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\",", "session.refresh(lang) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language( id: int, response: Response,", "@app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): disabilities", "from .schemas.experience import Experience as ExperienceSchema from .schemas.experience import ExperienceDB from .schemas.language import", "session: Session = Depends(db_session), ): lang = ( session.query(LanguageModel) .filter(LanguageModel.user_id == user.id) .filter(LanguageModel.id", "if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\", 
tags=[\"education\"], response_model=list[EducationDB]) def get_user_education(user:", "= Depends(db_session), ): dis = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id)", "def get_user_education(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id ==", "id=edu.id, edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date, ) for edu in educations ] @app.post(\"/user/education\",", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return [LanguageDB(id=lang.id,", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit()", "import EducationDB from .schemas.experience import Experience as ExperienceSchema from .schemas.experience import ExperienceDB from", "from .schemas.language import LanguageDB from .schemas.user import User, UserCreate, UserDB, UserUpdate APP_SECRET =", ") if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def", "( session.query(EducationModel) .filter(EducationModel.user_id == user.id) .filter(EducationModel.id == id) .one_or_none() ) if education: education.edu_type", "from sqlalchemy.orm import Session from .database.session import database, user_db from .deps import db_session", "education: education.edu_type = request.edu_type education.name = request.name education.city = request.city education.start_date = request.start_date", "FastAPI, Response, status from fastapi.middleware.cors import CORSMiddleware from fastapi_users import FastAPIUsers from 
fastapi_users.authentication", "session: Session = Depends(db_session), ): education = ( session.query(EducationModel) .filter(EducationModel.user_id == user.id) .filter(EducationModel.id", "@app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session:", "id: int, request: EducationSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session =", "request.city experience.start_date = request.start_date experience.end_date = request.end_date experience.description = request.description session.commit() session.refresh(experience) return", ".filter(LanguageModel.user_id == user.id) .filter(LanguageModel.id == id) .one_or_none() ) if lang: lang.level = request.level", "status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience( id: int, response: Response, user: User = Depends(fastapi_users.current_user()),", "Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = (", "session: Session = Depends(db_session), ): edu = EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\",", "request.language session.commit() session.refresh(lang) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language( id: int,", "def remove_user_experience( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session =", "Disability as DisabilitySchema from .schemas.disability import DisabilityDB from .schemas.education import Education as EducationSchema", "get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id == 
user.id).all()", "tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: DisabilitySchema, user: User = Depends(fastapi_users.current_user()), session: Session =", "session: Session = Depends(db_session), ): deleted = ( session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id == id).delete()", "): edu = LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language( id:", "= Depends(db_session), ): edu = DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def", "session: Session = Depends(db_session), ): deleted = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id", "tags=[\"experience\"]) def remove_user_experience( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session", "user.id) .filter(EducationModel.id == id) .one_or_none() ) if education: education.edu_type = request.edu_type education.name =", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return", "@app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED) def add_user_education( request: EducationSchema, user: User = Depends(fastapi_users.current_user()), session: Session", "DisabilitySchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = DisabilityModel(**request.dict(),", ".one_or_none() ) if experience: experience.position = request.position experience.employer = request.employer experience.city = request.city", "import Session from .database.session import database, user_db from .deps import db_session 
from .models.disability", "prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware,", ".one_or_none() ) if education: education.edu_type = request.edu_type education.name = request.name education.city = request.city", "request: DisabilitySchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu =", "experience.end_date = request.end_date experience.description = request.description session.commit() session.refresh(experience) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\",", ".delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB])", "session.refresh(education) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education( id: int, response: Response,", "== id) .one_or_none() ) if dis: dis.level = request.level dis.type = request.type session.commit()", "get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all()", ".filter(LanguageModel.id == id) .one_or_none() ) if lang: lang.level = request.level lang.language = request.language", "position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date, end_date=exp.end_date, description=exp.description, ) for exp in experiences ] @app.post(\"/user/experience\",", "= ( session.query(EducationModel) .filter(EducationModel.user_id == user.id) .filter(EducationModel.id 
== id) .one_or_none() ) if education:", "import Experience as ExperienceModel from .models.language import Language as LanguageModel from .schemas.disability import", "UserCreate, UserDB, UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app =", "experience.city = request.city experience.start_date = request.start_date experience.end_date = request.end_date experience.description = request.description session.commit()", "education.name = request.name education.city = request.city education.start_date = request.start_date education.end_date = request.end_date session.commit()", "import Experience as ExperienceSchema from .schemas.experience import ExperienceDB from .schemas.language import Language as", "session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return [ ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date, end_date=exp.end_date, description=exp.description,", "== user.id).all() return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for dis in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED)", "= EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education( id: int, request:", "FastAPIUsers from fastapi_users.authentication import JWTAuthentication from sqlalchemy.orm import Session from .database.session import database,", ".models.experience import Experience as ExperienceModel from .models.language import Language as LanguageModel from .schemas.disability", "Depends(db_session), ): edu = LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language(", 
"tags=[\"language\"]) def edit_user_language( id: int, request: LanguageSchema, response: Response, user: User = Depends(fastapi_users.current_user()),", "user.id).filter(LanguageModel.id == id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\",", ") if dis: dis.level = request.level dis.type = request.type session.commit() session.refresh(dis) return response.status_code", "EducationModel from .models.experience import Experience as ExperienceModel from .models.language import Language as LanguageModel", "if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user:", "import ExperienceDB from .schemas.language import Language as LanguageSchema from .schemas.language import LanguageDB from", "return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language( id: int, response: Response, user:", "database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)):", "app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] )", "from .models.language import Language as LanguageModel from .schemas.disability import Disability as DisabilitySchema from", "user_db, [jwt_authentication], User, UserCreate, UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) 
app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"])", "response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): lang =", ".filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .delete() ) if not deleted: response.status_code =", "( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .delete() ) if not deleted:", "disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: DisabilitySchema, user: User = Depends(fastapi_users.current_user()), session:", "int, request: LanguageSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(ExperienceModel)", ".filter(DisabilityModel.id == id) .delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit()", "UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\",", "response_model=list[ExperienceDB]) def get_user_experience(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id", "status from fastapi.middleware.cors import CORSMiddleware from fastapi_users import FastAPIUsers from fastapi_users.authentication import JWTAuthentication", "db_session from .models.disability import Disability as 
DisabilityModel from .models.education import Education as EducationModel", "fastapi_users = FastAPIUsers( user_db, [jwt_authentication], User, UserCreate, UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"])", "in experiences ] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def add_user_experience( request: ExperienceSchema, user: User =", "Depends(db_session), ): deleted = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .delete()", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return [LanguageDB(id=lang.id, language=lang.language,", "Depends(db_session), ): dis = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .one_or_none()", "session: Session = Depends(db_session), ): deleted = ( session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id == id).delete()", "end_date=exp.end_date, description=exp.description, ) for exp in experiences ] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def add_user_experience(", "return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for dis in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language(", "session: Session = Depends(db_session), ): dis = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id", ") app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) 
app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"])", "session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id == id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() return", "in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: DisabilitySchema, user: User = Depends(fastapi_users.current_user()),", ".models.education import Education as EducationModel from .models.experience import Experience as ExperienceModel from .models.language", "from .models.education import Education as EducationModel from .models.experience import Experience as ExperienceModel from", "@app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session:", "experience = ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience( id: int,", "== user.id) .filter(EducationModel.id == id) .one_or_none() ) if education: education.edu_type = request.edu_type education.name", "tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"],", ".models.disability 
import Disability as DisabilityModel from .models.education import Education as EducationModel from .models.experience", "session.commit() session.refresh(education) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education( id: int, response:", "= Depends(db_session), ): deleted = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id)", "remove_user_language( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "[ EducationDB( id=edu.id, edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date, ) for edu in educations", "dis: dis.level = request.level dis.type = request.type session.commit() session.refresh(dis) return response.status_code = status.HTTP_404_NOT_FOUND", "JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app = FastAPI() fastapi_users = FastAPIUsers( user_db, [jwt_authentication], User, UserCreate,", "edit_user_education( id: int, request: EducationSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session", "response_model=list[DisabilityDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id", ".filter(EducationModel.user_id == user.id) .filter(EducationModel.id == id) .one_or_none() ) if education: education.edu_type = request.edu_type", "sqlalchemy.orm import Session from .database.session import database, user_db from .deps import db_session from", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): education = ( session.query(EducationModel) .filter(EducationModel.user_id == user.id)", "database, user_db from .deps import db_session from 
.models.disability import Disability as DisabilityModel from", "as LanguageModel from .schemas.disability import Disability as DisabilitySchema from .schemas.disability import DisabilityDB from", "import CORSMiddleware from fastapi_users import FastAPIUsers from fastapi_users.authentication import JWTAuthentication from sqlalchemy.orm import", ".filter(DisabilityModel.id == id) .one_or_none() ) if dis: dis.level = request.level dis.type = request.type", "): deleted = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .delete() )", "User, UserCreate, UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\",", "== user.id) .filter(DisabilityModel.id == id) .delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND", "education.edu_type = request.edu_type education.name = request.name education.city = request.city education.start_date = request.start_date education.end_date", "add_user_education( request: EducationSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu", "status_code=status.HTTP_201_CREATED) def add_user_experience( request: ExperienceSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "= ( session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id == id).delete() ) if not deleted: response.status_code =", "= ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .one_or_none() ) if experience:", "UserDB, UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, 
tokenUrl=\"/auth/jwt/login\") app = FastAPI()", "Session = Depends(db_session), ): deleted = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id ==", "response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted =", "= status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session:", "level=dis.level.value) for dis in disabilities] @app.post(\"/user/disability\", tags=[\"disability\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: DisabilitySchema, user:", "[LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for lang in languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def add_user_language( request:", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit()", "def remove_user_education( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session =", "app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(),", "city=edu.city, start_date=edu.start_date, end_date=edu.end_date, ) for edu in educations ] @app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED) def", "Depends(fastapi_users.current_user()), session: 
Session = Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return [ ExperienceDB(", "app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True,", "@app.on_event(\"shutdown\") async def shutdown(): await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user: User =", "add_user_experience( request: ExperienceSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience", "= Depends(db_session)): disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value) for dis", "DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def edit_user_language( id: int, request: DisabilitySchema,", "request: ExperienceSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ):", "== id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"],", "session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def edit_user_language( id: int, request: DisabilitySchema, response: Response,", "session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user: User = 
Depends(fastapi_users.current_user()), session: Session = Depends(db_session)):", "allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\") async def startup(): await database.connect() @app.on_event(\"shutdown\") async def shutdown():", "Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): lang = (", "user.id) .filter(ExperienceModel.id == id) .delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return", "lang: lang.level = request.level lang.language = request.language session.commit() session.refresh(lang) return response.status_code = status.HTTP_404_NOT_FOUND", ".database.session import database, user_db from .deps import db_session from .models.disability import Disability as", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): dis = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id", "Session = Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return [ ExperienceDB( id=exp.id, position=exp.position,", "LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language( id: int, request: LanguageSchema,", "@app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session:", "= request.end_date session.commit() session.refresh(education) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def remove_user_education( id:", "): deleted = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .delete() )", "Depends(db_session), ): experience = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) 
.filter(ExperienceModel.id == id) .one_or_none()", ".schemas.experience import Experience as ExperienceSchema from .schemas.experience import ExperienceDB from .schemas.language import Language", "= ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience( id: int, request:", "as LanguageSchema from .schemas.language import LanguageDB from .schemas.user import User, UserCreate, UserDB, UserUpdate", "ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date, end_date=exp.end_date, description=exp.description, ) for exp in experiences", "] @app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED) def add_user_education( request: EducationSchema, user: User = Depends(fastapi_users.current_user()), session:", "response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def remove_user_language( id: int, response: Response, user: User", "dis.type = request.type session.commit() session.refresh(dis) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def remove_user_language(", "jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app = FastAPI() fastapi_users = FastAPIUsers( user_db, [jwt_authentication],", "session: Session = Depends(db_session), ): experience = ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\",", "experience = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id) .one_or_none() ) if", "fastapi_users import FastAPIUsers from fastapi_users.authentication import JWTAuthentication from sqlalchemy.orm import Session 
from .database.session", "JWTAuthentication from sqlalchemy.orm import Session from .database.session import database, user_db from .deps import", "FastAPI() fastapi_users = FastAPIUsers( user_db, [jwt_authentication], User, UserCreate, UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\",", "import DisabilityDB from .schemas.education import Education as EducationSchema from .schemas.education import EducationDB from", "tags=[\"education\"]) def edit_user_education( id: int, request: EducationSchema, response: Response, user: User = Depends(fastapi_users.current_user()),", "request: DisabilitySchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ):", "Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = (", "def add_user_experience( request: ExperienceSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ):", "lang in languages] @app.post(\"/user/language\", tags=[\"language\"], status_code=status.HTTP_201_CREATED) def add_user_language( request: LanguageSchema, user: User =", "Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id == user.id).all() return [ ExperienceDB( id=exp.id, position=exp.position, employer=exp.employer, city=exp.city,", "= request.city experience.start_date = request.start_date experience.end_date = request.end_date experience.description = request.description session.commit() session.refresh(experience)", "from .schemas.language import Language as LanguageSchema from .schemas.language import LanguageDB from .schemas.user import", "= status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB]) def get_user_education(user: User = Depends(fastapi_users.current_user()), session:", 
"user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"]) def edit_user_language( id: int, request: LanguageSchema, response:", "status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB]) def get_user_education(user: User = Depends(fastapi_users.current_user()), session: Session", "DisabilitySchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): dis", "= FastAPIUsers( user_db, [jwt_authentication], User, UserCreate, UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(),", "import LanguageDB from .schemas.user import User, UserCreate, UserDB, UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication", "APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app = FastAPI() fastapi_users =", "== user.id).filter(EducationModel.id == id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit()", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu)", "status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language( id: int, response: Response, user: User = Depends(fastapi_users.current_user()),", "Session = Depends(db_session), ): edu = LanguageModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/language\", tags=[\"language\"])", "Depends(db_session), ): lang = ( session.query(LanguageModel) .filter(LanguageModel.user_id == user.id) 
.filter(LanguageModel.id == id) .one_or_none()", "id) .one_or_none() ) if education: education.edu_type = request.edu_type education.name = request.name education.city =", "= Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for lang", "in educations ] @app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED) def add_user_education( request: EducationSchema, user: User =", "educations ] @app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED) def add_user_education( request: EducationSchema, user: User = Depends(fastapi_users.current_user()),", "Session = Depends(db_session), ): dis = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id ==", "tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\") async def startup(): await", "= request.language session.commit() session.refresh(lang) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\", tags=[\"language\"]) def remove_user_language( id:", ".deps import db_session from .models.disability import Disability as DisabilityModel from .models.education import Education", "== user.id).all() return [ EducationDB( id=edu.id, edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date, ) for", "session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def edit_user_experience( id: int, request: ExperienceSchema, response: Response, user:", "def get_user_experience(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): experiences = session.query(ExperienceModel).filter(ExperienceModel.user_id ==", 
"user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = DisabilityModel(**request.dict(), user_id=user.id)", "Session from .database.session import database, user_db from .deps import db_session from .models.disability import", "id: int, request: LanguageSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session =", ".filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .delete() ) if not deleted: response.status_code =", "= session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return [ EducationDB( id=edu.id, edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date,", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): dis = ( session.query(DisabilityModel)", "def add_user_education( request: EducationSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ):", "Session = Depends(db_session), ): edu = DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"])", "= ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .one_or_none() ) if dis:", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit() session.refresh(experience)", "= ( session.query(LanguageModel) .filter(LanguageModel.user_id == user.id) .filter(LanguageModel.id == id) .one_or_none() ) if lang:", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id", "= request.start_date education.end_date = request.end_date session.commit() 
session.refresh(education) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"])", "Session = Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for", ") for edu in educations ] @app.post(\"/user/education\", tags=[\"education\"], status_code=status.HTTP_201_CREATED) def add_user_education( request: EducationSchema,", ".schemas.education import Education as EducationSchema from .schemas.education import EducationDB from .schemas.experience import Experience", "prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_users_router(), prefix=\"/users\", tags=[\"users\"]) app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"],", "FastAPIUsers( user_db, [jwt_authentication], User, UserCreate, UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\",", "session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education( id: int, request: EducationSchema, response: Response,", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): lang = ( session.query(LanguageModel) .filter(LanguageModel.user_id == user.id)", "edu = DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def edit_user_language( id: int,", "): edu = DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def 
edit_user_language( id:", "): lang = ( session.query(LanguageModel) .filter(LanguageModel.user_id == user.id) .filter(LanguageModel.id == id) .one_or_none() )", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = ( session.query(ExperienceModel)", "= request.position experience.employer = request.employer experience.city = request.city experience.start_date = request.start_date experience.end_date =", "exp in experiences ] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def add_user_experience( request: ExperienceSchema, user: User", "edit_user_experience( id: int, request: ExperienceSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session", "def remove_user_language( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session =", "await database.connect() @app.on_event(\"shutdown\") async def shutdown(): await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user:", "add_user_language( request: LanguageSchema, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu", "return session.commit() @app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB]) def get_user_education(user: User = Depends(fastapi_users.current_user()), session: Session =", "LanguageDB from .schemas.user import User, UserCreate, UserDB, UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication =", "session: Session = Depends(db_session)): disabilities = session.query(DisabilityModel).filter(DisabilityModel.user_id == user.id).all() return [DisabilityDB(id=dis.id, type=dis.type.value, level=dis.level.value)", "Response, status from fastapi.middleware.cors import CORSMiddleware from fastapi_users import FastAPIUsers from fastapi_users.authentication 
import", "session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def edit_user_education( id: int, request: EducationSchema, response: Response, user: User", "UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600, tokenUrl=\"/auth/jwt/login\") app = FastAPI() fastapi_users", ") @app.on_event(\"startup\") async def startup(): await database.connect() @app.on_event(\"shutdown\") async def shutdown(): await database.disconnect()", "request.start_date experience.end_date = request.end_date experience.description = request.description session.commit() session.refresh(experience) return response.status_code = status.HTTP_404_NOT_FOUND", "response_model=list[LanguageDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id", "session: Session = Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return [ EducationDB( id=edu.id,", ") if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/education\", tags=[\"education\"], response_model=list[EducationDB]) def", "): deleted = ( session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id == id).delete() ) if not deleted:", "session.commit() session.refresh(dis) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def remove_user_language( id: int, response:", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id)", "async def shutdown(): await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user: User = 
Depends(fastapi_users.current_user()),", "not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user: User", "id=exp.id, position=exp.position, employer=exp.employer, city=exp.city, start_date=exp.start_date, end_date=exp.end_date, description=exp.description, ) for exp in experiences ]", "return [ EducationDB( id=edu.id, edu_type=edu.edu_type.value, name=edu.name, city=edu.city, start_date=edu.start_date, end_date=edu.end_date, ) for edu in", "startup(): await database.connect() @app.on_event(\"shutdown\") async def shutdown(): await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def", "as EducationModel from .models.experience import Experience as ExperienceModel from .models.language import Language as", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(DisabilityModel)", "UserUpdate, UserDB, ) app.include_router(fastapi_users.get_auth_router(jwt_authentication), prefix=\"/auth/jwt\", tags=[\"auth\"]) app.include_router(fastapi_users.get_register_router(), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_reset_password_router(APP_SECRET), prefix=\"/auth\", tags=[\"auth\"]) app.include_router(fastapi_users.get_verify_router(APP_SECRET),", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = DisabilityModel(**request.dict(), user_id=user.id) session.add(edu) session.commit()", "def edit_user_language( id: int, request: LanguageSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session:", "request.name education.city = request.city education.start_date = request.start_date education.end_date = request.end_date session.commit() session.refresh(education) return", "session.commit() session.refresh(experience) 
return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience( id: int, response:", "User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): experience = ExperienceModel(**request.dict(), user_id=user.id) session.add(experience)", ".schemas.user import User, UserCreate, UserDB, UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET, lifetime_seconds=3600,", "session.refresh(dis) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/disability\", tags=[\"disability\"]) def remove_user_language( id: int, response: Response,", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(LanguageModel).filter(LanguageModel.user_id == user.id).filter(LanguageModel.id", "= Depends(db_session), ): deleted = ( session.query(ExperienceModel) .filter(ExperienceModel.user_id == user.id) .filter(ExperienceModel.id == id)", "app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\") async def startup(): await database.connect()", "for exp in experiences ] @app.post(\"/user/experience\", tags=[\"experience\"], status_code=status.HTTP_201_CREATED) def add_user_experience( request: ExperienceSchema, user:", "= Depends(db_session), ): experience = ExperienceModel(**request.dict(), user_id=user.id) session.add(experience) session.commit() session.refresh(experience) @app.put(\"/user/experience\", tags=[\"experience\"]) def", "from .schemas.user import User, UserCreate, UserDB, UserUpdate APP_SECRET = os.getenv(\"APP_SECRET\") jwt_authentication = JWTAuthentication(secret=APP_SECRET,", "= Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(EducationModel).filter(EducationModel.user_id == 
user.id).filter(EducationModel.id", "( session.query(LanguageModel) .filter(LanguageModel.user_id == user.id) .filter(LanguageModel.id == id) .one_or_none() ) if lang: lang.level", "lang.level = request.level lang.language = request.language session.commit() session.refresh(lang) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/language\",", "database.connect() @app.on_event(\"shutdown\") async def shutdown(): await database.disconnect() @app.get(\"/user/experience\", tags=[\"experience\"], response_model=list[ExperienceDB]) def get_user_experience(user: User", "import Disability as DisabilityModel from .models.education import Education as EducationModel from .models.experience import", "return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session =", "int, request: EducationSchema, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session),", "= Depends(db_session), ): edu = EducationModel(**request.dict(), user_id=user.id) session.add(edu) session.commit() session.refresh(edu) @app.put(\"/user/education\", tags=[\"education\"]) def", "id).delete() ) if not deleted: response.status_code = status.HTTP_404_NOT_FOUND return session.commit() @app.get(\"/user/disability\", tags=[\"disability\"], response_model=list[DisabilityDB])", "session.refresh(edu) @app.put(\"/user/disability\", tags=[\"disability\"]) def edit_user_language( id: int, request: DisabilitySchema, response: Response, user: User", "get_user_education(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id == user.id).all()", "Depends(db_session)): educations = session.query(EducationModel).filter(EducationModel.user_id == user.id).all() return [ EducationDB( id=edu.id, 
edu_type=edu.edu_type.value, name=edu.name, city=edu.city,", "tags=[\"disability\"]) def remove_user_language( id: int, response: Response, user: User = Depends(fastapi_users.current_user()), session: Session", "Session = Depends(db_session), ): lang = ( session.query(LanguageModel) .filter(LanguageModel.user_id == user.id) .filter(LanguageModel.id ==", "from .schemas.education import Education as EducationSchema from .schemas.education import EducationDB from .schemas.experience import", "return session.commit() @app.get(\"/user/language\", tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session =", "Depends(db_session)): languages = session.query(LanguageModel).filter(LanguageModel.user_id == user.id).all() return [LanguageDB(id=lang.id, language=lang.language, level=lang.level.value) for lang in", "@app.put(\"/user/disability\", tags=[\"disability\"]) def edit_user_language( id: int, request: DisabilitySchema, response: Response, user: User =", "Experience as ExperienceModel from .models.language import Language as LanguageModel from .schemas.disability import Disability", "as DisabilitySchema from .schemas.disability import DisabilityDB from .schemas.education import Education as EducationSchema from", "= request.start_date experience.end_date = request.end_date experience.description = request.description session.commit() session.refresh(experience) return response.status_code =", "response: Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): education =", "request.start_date education.end_date = request.end_date session.commit() session.refresh(education) return response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/education\", tags=[\"education\"]) def", "Response, user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): dis = (", "= Depends(db_session), ): deleted = 
( session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id == id).delete() ) if", "CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"] ) @app.on_event(\"startup\") async def startup(): await database.connect() @app.on_event(\"shutdown\")", "response.status_code = status.HTTP_404_NOT_FOUND @app.delete(\"/user/experience\", tags=[\"experience\"]) def remove_user_experience( id: int, response: Response, user: User", "Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): deleted = ( session.query(EducationModel).filter(EducationModel.user_id == user.id).filter(EducationModel.id ==", "user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session), ): edu = LanguageModel(**request.dict(), user_id=user.id)", "tags=[\"language\"], response_model=list[LanguageDB]) def get_user_language(user: User = Depends(fastapi_users.current_user()), session: Session = Depends(db_session)): languages =", "deleted = ( session.query(DisabilityModel) .filter(DisabilityModel.user_id == user.id) .filter(DisabilityModel.id == id) .delete() ) if" ]
[ "utf-8 -*- # Generated by Django 1.10.1 on 2017-04-15 17:10 from __future__ import", "= 3 CHOICES = ( (ALL, 'All'), (UPDATES, 'Updates and New Features'), (PLAYER_NOTIFICATIONS,", "17:10 from __future__ import unicode_literals from django.db import migrations class EmailTag: ALL =", "(option, _) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences = create_default_preferences for user in TFUser.objects.all():", "EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences = create_default_preferences for user in TFUser.objects.all(): try: user.user_email_preferences except UserEmailPreferences.DoesNotExist:", "PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS = 3 CHOICES = ( (ALL, 'All'), (UPDATES, 'Updates", "from __future__ import unicode_literals from django.db import migrations class EmailTag: ALL = 0", "UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class Migration(migrations.Migration): dependencies = [ ('tf_auth', '0002_auto_20170415_1821'), ]", "except UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class Migration(migrations.Migration): dependencies = [ ('tf_auth', '0002_auto_20170415_1821'),", "in TFUser.objects.all(): try: user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class Migration(migrations.Migration): dependencies", "UserEmailPreferences.create_default_preferences = create_default_preferences for user in TFUser.objects.all(): try: user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences =", "(UPDATES, 'Updates and New Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'), 
(TEAM_NOTIFICATIONS, 'Team Notifications'), ) def", "<gh_stars>1-10 # -*- coding: utf-8 -*- # Generated by Django 1.10.1 on 2017-04-15", "_) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences = create_default_preferences for user in TFUser.objects.all(): try:", "Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'), (TEAM_NOTIFICATIONS, 'Team Notifications'), ) def forwards(apps, schema_editor): TFUser =", "in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences = create_default_preferences for user in TFUser.objects.all(): try: user.user_email_preferences", "( (ALL, 'All'), (UPDATES, 'Updates and New Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'), (TEAM_NOTIFICATIONS, 'Team", "-*- coding: utf-8 -*- # Generated by Django 1.10.1 on 2017-04-15 17:10 from", "2 TEAM_NOTIFICATIONS = 3 CHOICES = ( (ALL, 'All'), (UPDATES, 'Updates and New", "UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class Migration(migrations.Migration): dependencies = [ ('tf_auth', '0002_auto_20170415_1821'), ] operations = [", "create_default_preferences(self): for (option, _) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences = create_default_preferences for user", "user in TFUser.objects.all(): try: user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class Migration(migrations.Migration):", "# -*- coding: utf-8 -*- # Generated by Django 1.10.1 on 2017-04-15 17:10", "= UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class Migration(migrations.Migration): dependencies = [ ('tf_auth', '0002_auto_20170415_1821'), ] operations 
=", "forwards(apps, schema_editor): TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self):", "apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for (option, _) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences = create_default_preferences", "import migrations class EmailTag: ALL = 0 UPDATES = 1 PLAYER_NOTIFICATIONS = 2", "CHOICES = ( (ALL, 'All'), (UPDATES, 'Updates and New Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'),", "New Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'), (TEAM_NOTIFICATIONS, 'Team Notifications'), ) def forwards(apps, schema_editor): TFUser", "for (option, _) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences = create_default_preferences for user in", "class Migration(migrations.Migration): dependencies = [ ('tf_auth', '0002_auto_20170415_1821'), ] operations = [ migrations.RunPython(forwards, migrations.RunPython.noop)", "Migration(migrations.Migration): dependencies = [ ('tf_auth', '0002_auto_20170415_1821'), ] operations = [ migrations.RunPython(forwards, migrations.RunPython.noop) ]", "user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class Migration(migrations.Migration): dependencies = [ ('tf_auth',", "Generated by Django 1.10.1 on 2017-04-15 17:10 from __future__ import unicode_literals from django.db", "unicode_literals from django.db import migrations class EmailTag: ALL = 0 UPDATES = 1", "try: user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user) 
preferences.create_default_preferences() class Migration(migrations.Migration): dependencies = [", "'Player Notifications'), (TEAM_NOTIFICATIONS, 'Team Notifications'), ) def forwards(apps, schema_editor): TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences", "= apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for (option, _) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option,", "'Updates and New Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'), (TEAM_NOTIFICATIONS, 'Team Notifications'), ) def forwards(apps,", "EmailPreference = apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for (option, _) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences", "django.db import migrations class EmailTag: ALL = 0 UPDATES = 1 PLAYER_NOTIFICATIONS =", "= apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for (option, _) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences =", "apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for (option, _) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self)", "Notifications'), (TEAM_NOTIFICATIONS, 'Team Notifications'), ) def forwards(apps, schema_editor): TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences =", ") def forwards(apps, schema_editor): TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference')", "EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) 
UserEmailPreferences.create_default_preferences = create_default_preferences for user in TFUser.objects.all(): try: user.user_email_preferences except", "# Generated by Django 1.10.1 on 2017-04-15 17:10 from __future__ import unicode_literals from", "and New Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'), (TEAM_NOTIFICATIONS, 'Team Notifications'), ) def forwards(apps, schema_editor):", "'Team Notifications'), ) def forwards(apps, schema_editor): TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences') EmailPreference", "by Django 1.10.1 on 2017-04-15 17:10 from __future__ import unicode_literals from django.db import", "preferences.create_default_preferences() class Migration(migrations.Migration): dependencies = [ ('tf_auth', '0002_auto_20170415_1821'), ] operations = [ migrations.RunPython(forwards,", "create_default_preferences for user in TFUser.objects.all(): try: user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences()", "migrations class EmailTag: ALL = 0 UPDATES = 1 PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS", "(ALL, 'All'), (UPDATES, 'Updates and New Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'), (TEAM_NOTIFICATIONS, 'Team Notifications'),", "coding: utf-8 -*- # Generated by Django 1.10.1 on 2017-04-15 17:10 from __future__", "class EmailTag: ALL = 0 UPDATES = 1 PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS =", "import unicode_literals from django.db import migrations class EmailTag: ALL = 0 UPDATES =", "(TEAM_NOTIFICATIONS, 'Team Notifications'), ) def forwards(apps, schema_editor): TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences')", "(PLAYER_NOTIFICATIONS, 'Player Notifications'), (TEAM_NOTIFICATIONS, 'Team Notifications'), ) def forwards(apps, schema_editor): TFUser = 
apps.get_model('tf_auth.TFUser')", "preferences = UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class Migration(migrations.Migration): dependencies = [ ('tf_auth', '0002_auto_20170415_1821'), ] operations", "on 2017-04-15 17:10 from __future__ import unicode_literals from django.db import migrations class EmailTag:", "user_email_preferences=self) UserEmailPreferences.create_default_preferences = create_default_preferences for user in TFUser.objects.all(): try: user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences", "def forwards(apps, schema_editor): TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference') def", "ALL = 0 UPDATES = 1 PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS = 3 CHOICES", "= ( (ALL, 'All'), (UPDATES, 'Updates and New Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'), (TEAM_NOTIFICATIONS,", "= apps.get_model('tf_auth.TFUser') UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for (option, _)", "1.10.1 on 2017-04-15 17:10 from __future__ import unicode_literals from django.db import migrations class", "TEAM_NOTIFICATIONS = 3 CHOICES = ( (ALL, 'All'), (UPDATES, 'Updates and New Features'),", "def create_default_preferences(self): for (option, _) in EmailTag.CHOICES: EmailPreference.objects.create(tag=option, user_email_preferences=self) UserEmailPreferences.create_default_preferences = create_default_preferences for", "3 CHOICES = ( (ALL, 'All'), (UPDATES, 'Updates and New Features'), (PLAYER_NOTIFICATIONS, 'Player", "= 2 TEAM_NOTIFICATIONS = 3 CHOICES = ( (ALL, 'All'), (UPDATES, 'Updates and", "Notifications'), ) def forwards(apps, schema_editor): TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences = 
apps.get_model('tf_auth.UserEmailPreferences') EmailPreference =", "0 UPDATES = 1 PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS = 3 CHOICES = (", "apps.get_model('tf_auth.TFUser') UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for (option, _) in", "__future__ import unicode_literals from django.db import migrations class EmailTag: ALL = 0 UPDATES", "-*- # Generated by Django 1.10.1 on 2017-04-15 17:10 from __future__ import unicode_literals", "= 0 UPDATES = 1 PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS = 3 CHOICES =", "2017-04-15 17:10 from __future__ import unicode_literals from django.db import migrations class EmailTag: ALL", "schema_editor): TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for", "UPDATES = 1 PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS = 3 CHOICES = ( (ALL,", "TFUser = apps.get_model('tf_auth.TFUser') UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for (option,", "Django 1.10.1 on 2017-04-15 17:10 from __future__ import unicode_literals from django.db import migrations", "from django.db import migrations class EmailTag: ALL = 0 UPDATES = 1 PLAYER_NOTIFICATIONS", "for user in TFUser.objects.all(): try: user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class", "EmailTag: ALL = 0 UPDATES = 1 PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS = 3", "'All'), (UPDATES, 'Updates and New Features'), (PLAYER_NOTIFICATIONS, 'Player Notifications'), (TEAM_NOTIFICATIONS, 'Team Notifications'), )", "= create_default_preferences for user in 
TFUser.objects.all(): try: user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user)", "= 1 PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS = 3 CHOICES = ( (ALL, 'All'),", "TFUser.objects.all(): try: user.user_email_preferences except UserEmailPreferences.DoesNotExist: preferences = UserEmailPreferences.objects.create(user=user) preferences.create_default_preferences() class Migration(migrations.Migration): dependencies =", "UserEmailPreferences = apps.get_model('tf_auth.UserEmailPreferences') EmailPreference = apps.get_model('tf_auth.EmailPreference') def create_default_preferences(self): for (option, _) in EmailTag.CHOICES:", "1 PLAYER_NOTIFICATIONS = 2 TEAM_NOTIFICATIONS = 3 CHOICES = ( (ALL, 'All'), (UPDATES," ]
[ "input.to_dense(), weight.to_dense(), bias.to_dense() if bias else None grad_input = grad_weight = grad_bias =", "if bias is not None: output += bias.unsqueeze(0).expand_as(output) return output @staticmethod def backward(ctx,", "return output @staticmethod def backward(ctx, grad_output): input, weight, bias = ctx.saved_tensors input, weight,", "if bias is not None and ctx.needs_input_grad[2]: grad_bias = grad_output.sum(0) return grad_input, grad_weight,", "None: output += bias.unsqueeze(0).expand_as(output) return output @staticmethod def backward(ctx, grad_output): input, weight, bias", "bias is not None and ctx.needs_input_grad[2]: grad_bias = grad_output.sum(0) return grad_input, grad_weight, grad_bias", "ctx.needs_input_grad[1]: grad_weight = grad_output.t().mm(input) if bias is not None and ctx.needs_input_grad[2]: grad_bias =", "else None) output = input.mm(weight.t()) if bias is not None: output += bias.unsqueeze(0).expand_as(output)", "if bias else None grad_input = grad_weight = grad_bias = None if ctx.needs_input_grad[0]:", "grad_bias = None if ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight = grad_output.t().mm(input)", "forward(ctx, input, weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if bias else None) output =", "if bias else None) output = input.mm(weight.t()) if bias is not None: output", "not None: output += bias.unsqueeze(0).expand_as(output) return output @staticmethod def backward(ctx, grad_output): input, weight,", "bias.unsqueeze(0).expand_as(output) return output @staticmethod def backward(ctx, grad_output): input, weight, bias = ctx.saved_tensors input,", "= input.to_dense(), weight.to_dense(), bias.to_dense() if bias else None grad_input = grad_weight = grad_bias", "grad_input = grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight = grad_output.t().mm(input) if bias is not None", "if ctx.needs_input_grad[1]: grad_weight = 
grad_output.t().mm(input) if bias is not None and ctx.needs_input_grad[2]: grad_bias", "<gh_stars>1-10 import torch class Linear(torch.autograd.Function): @staticmethod def forward(ctx, input, weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(),", "input.mm(weight.t()) if bias is not None: output += bias.unsqueeze(0).expand_as(output) return output @staticmethod def", "+= bias.unsqueeze(0).expand_as(output) return output @staticmethod def backward(ctx, grad_output): input, weight, bias = ctx.saved_tensors", "is not None: output += bias.unsqueeze(0).expand_as(output) return output @staticmethod def backward(ctx, grad_output): input,", "ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if bias else None) output = input.mm(weight.t()) if bias is", "input, weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if bias else None) output = input.mm(weight.t())", "= grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight = grad_output.t().mm(input) if bias is not None and", "grad_weight = grad_output.t().mm(input) if bias is not None and ctx.needs_input_grad[2]: grad_bias = grad_output.sum(0)", "= ctx.saved_tensors input, weight, bias = input.to_dense(), weight.to_dense(), bias.to_dense() if bias else None", "input, weight, bias = input.to_dense(), weight.to_dense(), bias.to_dense() if bias else None grad_input =", "None) output = input.mm(weight.t()) if bias is not None: output += bias.unsqueeze(0).expand_as(output) return", "else None grad_input = grad_weight = grad_bias = None if ctx.needs_input_grad[0]: grad_input =", "backward(ctx, grad_output): input, weight, bias = ctx.saved_tensors input, weight, bias = input.to_dense(), weight.to_dense(),", "output = input.mm(weight.t()) if bias is not None: output += bias.unsqueeze(0).expand_as(output) return output", "weight.to_dense(), bias.to_dense() if bias else None grad_input = grad_weight = grad_bias = None", "torch 
class Linear(torch.autograd.Function): @staticmethod def forward(ctx, input, weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if", "grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight = grad_output.t().mm(input) if bias is not None and ctx.needs_input_grad[2]:", "= None if ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight = grad_output.t().mm(input) if", "bias.to_dense() if bias else None grad_input = grad_weight = grad_bias = None if", "output += bias.unsqueeze(0).expand_as(output) return output @staticmethod def backward(ctx, grad_output): input, weight, bias =", "def forward(ctx, input, weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if bias else None) output", "= input.mm(weight.t()) if bias is not None: output += bias.unsqueeze(0).expand_as(output) return output @staticmethod", "None if ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight = grad_output.t().mm(input) if bias", "bias else None) output = input.mm(weight.t()) if bias is not None: output +=", "output @staticmethod def backward(ctx, grad_output): input, weight, bias = ctx.saved_tensors input, weight, bias", "bias = ctx.saved_tensors input, weight, bias = input.to_dense(), weight.to_dense(), bias.to_dense() if bias else", "ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight = grad_output.t().mm(input) if bias is not", "weight, bias = ctx.saved_tensors input, weight, bias = input.to_dense(), weight.to_dense(), bias.to_dense() if bias", "weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if bias else None) output = input.mm(weight.t()) if", "grad_output.t().mm(input) if bias is not None and ctx.needs_input_grad[2]: grad_bias = grad_output.sum(0) return grad_input,", "bias.to_sparse() if bias else None) 
output = input.mm(weight.t()) if bias is not None:", "bias is not None: output += bias.unsqueeze(0).expand_as(output) return output @staticmethod def backward(ctx, grad_output):", "= grad_weight = grad_bias = None if ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight) if ctx.needs_input_grad[1]:", "grad_weight = grad_bias = None if ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight", "input, weight, bias = ctx.saved_tensors input, weight, bias = input.to_dense(), weight.to_dense(), bias.to_dense() if", "@staticmethod def forward(ctx, input, weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if bias else None)", "Linear(torch.autograd.Function): @staticmethod def forward(ctx, input, weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if bias else", "@staticmethod def backward(ctx, grad_output): input, weight, bias = ctx.saved_tensors input, weight, bias =", "None grad_input = grad_weight = grad_bias = None if ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight)", "= grad_bias = None if ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight =", "bias = input.to_dense(), weight.to_dense(), bias.to_dense() if bias else None grad_input = grad_weight =", "bias else None grad_input = grad_weight = grad_bias = None if ctx.needs_input_grad[0]: grad_input", "ctx.saved_tensors input, weight, bias = input.to_dense(), weight.to_dense(), bias.to_dense() if bias else None grad_input", "grad_input = grad_weight = grad_bias = None if ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight) if", "if ctx.needs_input_grad[0]: grad_input = grad_output.mm(weight) if ctx.needs_input_grad[1]: grad_weight = grad_output.t().mm(input) if bias is", "def backward(ctx, grad_output): input, weight, bias = ctx.saved_tensors input, weight, bias = input.to_dense(),", "= 
grad_output.t().mm(input) if bias is not None and ctx.needs_input_grad[2]: grad_bias = grad_output.sum(0) return", "weight.to_sparse(), bias.to_sparse() if bias else None) output = input.mm(weight.t()) if bias is not", "class Linear(torch.autograd.Function): @staticmethod def forward(ctx, input, weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if bias", "import torch class Linear(torch.autograd.Function): @staticmethod def forward(ctx, input, weight, bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse()", "grad_output): input, weight, bias = ctx.saved_tensors input, weight, bias = input.to_dense(), weight.to_dense(), bias.to_dense()", "bias=None): ctx.save_for_backward(input.to_sparse(), weight.to_sparse(), bias.to_sparse() if bias else None) output = input.mm(weight.t()) if bias", "weight, bias = input.to_dense(), weight.to_dense(), bias.to_dense() if bias else None grad_input = grad_weight" ]
[ "is Money assert str(Rate(50) + Rate(50) + Money(50)) == \"150.00\" assert repr(Rate(50) +", "+ Rate(50) + Money(50)).__class__ is Money assert str(Rate(50) + Rate(50) + Money(50)) ==", "Rate(50) > Rate(49) assert Rate(50) + Rate(50) == 100 assert (Rate(50) + Rate(50)).__class__", "123456000, } assert dict(Rate(\"0.1\")) == { \"value\": \"0.10\", \"units\": 0, \"nanos\": 100000000, }", "hash(m) def test_rate_asdict(): assert Rate(1338).asdict() == { \"value\": \"1338.00\", \"units\": 1338, \"nanos\": 0,", "Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) == \"4711.1338\" assert Rate(100).currency is None assert Rate(100).currency_code", "test_rate_asdict(): assert Rate(1338).asdict() == { \"value\": \"1338.00\", \"units\": 1338, \"nanos\": 0, } assert", "'<stockholm.Money: \"150.00\">' assert Rate(Money(100)) == Rate(100) assert Rate(Money(100)).__class__ is Rate def test_bad_rates(): with", "471100000, } assert dict(Rate(\"0.123456\")) == { \"value\": \"0.123456\", \"units\": 0, \"nanos\": 123456000, }", "ConversionError, ExchangeRate, Money, Rate def test_rate(): assert Rate(100) == 100 assert Rate(\"100.50551\") ==", "None assert Rate(100).currency_code is None assert Rate(100).amount == 100 assert Rate(100).value == \"100.00\"", "49 assert Rate(50) > Rate(49) assert Rate(50) + Rate(50) == 100 assert (Rate(50)", "100000000, } assert Rate(1338).keys() == [\"value\", \"units\", \"nanos\"] assert Rate(1338)[\"units\"] == 1338 assert", "assert Rate(100).currency is None assert Rate(100).currency_code is None assert Rate(100).amount == 100 assert", "stockholm.Rate(0) assert hash(m) def test_rate_asdict(): assert Rate(1338).asdict() == { \"value\": \"1338.00\", \"units\": 1338,", "== \"13384711.00\" def test_rate_json(): rate = Rate(\"-999999999999999999.999999999\") json_string = json.dumps({\"rate\": rate.asdict()}) str(Rate(json.loads(json_string).get(\"rate\"))) ==", "Rate(50) < 51 assert Rate(50) > 49 assert Rate(50) > Rate(49) 
assert Rate(50)", "Rate(Money(100)) == Rate(100) assert Rate(Money(100)).__class__ is Rate def test_bad_rates(): with pytest.raises(ConversionError): Rate(1, currency=\"EUR\")", "assert Rate(1338)[\"units\"] == 1338 assert Rate(1338)[\"value\"] == \"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict():", "0, \"nanos\": 123456000, } assert dict(Rate(\"0.1\")) == { \"value\": \"0.10\", \"units\": 0, \"nanos\":", "def test_bad_rates(): with pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with pytest.raises(ConversionError): Rate(100,", "Money, Rate def test_rate(): assert Rate(100) == 100 assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert", "Rate def test_rate(): assert Rate(100) == 100 assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\"))", "0, \"nanos\": 100000000, } assert Rate(1338).keys() == [\"value\", \"units\", \"nanos\"] assert Rate(1338)[\"units\"] ==", "pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d = {\"value\": \"13384711\", \"units\": 13384711, \"nanos\": 0} assert", "assert Rate(100).amount == 100 assert Rate(100).value == \"100.00\" assert Rate(50) < 51 assert", "str(Rate(50) + Rate(50) + Money(50)) == \"150.00\" assert repr(Rate(50) + Rate(50) + Money(50))", "\"100.00\" assert Rate(50) < 51 assert Rate(50) > 49 assert Rate(50) > Rate(49)", "\"value\": \"0.10\", \"units\": 0, \"nanos\": 100000000, } assert Rate(1338).keys() == [\"value\", \"units\", \"nanos\"]", "Rate(Money(100)).__class__ is Rate def test_bad_rates(): with pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\"))", "assert Rate(Money(100)) == Rate(100) assert Rate(Money(100)).__class__ is Rate def test_bad_rates(): with pytest.raises(ConversionError): Rate(1,", "== { \"value\": \"0.123456\", \"units\": 0, 
\"nanos\": 123456000, } assert dict(Rate(\"0.1\")) == {", "== [\"value\", \"units\", \"nanos\"] assert Rate(1338)[\"units\"] == 1338 assert Rate(1338)[\"value\"] == \"1338.00\" with", "'<stockholm.Rate: \"100.00\">' assert (Rate(50) + Rate(50) + Money(50)).__class__ is Money assert str(Rate(50) +", "test_rate_hashable() -> None: m = stockholm.Rate(0) assert hash(m) def test_rate_asdict(): assert Rate(1338).asdict() ==", "Rate(100, from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100) with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError):", "assert Rate(100).value == \"100.00\" assert Rate(50) < 51 assert Rate(50) > 49 assert", "<gh_stars>10-100 import json import pytest import stockholm from stockholm import ConversionError, ExchangeRate, Money,", "assert (Rate(50) + Rate(50)).__class__ is Rate assert str(Rate(50) + Rate(50)) == \"100.00\" assert", "\"0.123456\", \"units\": 0, \"nanos\": 123456000, } assert dict(Rate(\"0.1\")) == { \"value\": \"0.10\", \"units\":", "[\"value\", \"units\", \"nanos\"] assert Rate(1338)[\"units\"] == 1338 assert Rate(1338)[\"value\"] == \"1338.00\" with pytest.raises(KeyError):", "\"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d = {\"value\": \"13384711\", \"units\": 13384711, \"nanos\":", "\"nanos\": 0} assert str(Rate.from_dict(d)) == \"13384711.00\" assert str(Rate(d)) == \"13384711.00\" def test_rate_json(): rate", "== ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) == \"4711.1338\" assert Rate(100).currency is None assert Rate(100).currency_code is", "Rate def test_bad_rates(): with pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with pytest.raises(ConversionError):", "import ConversionError, ExchangeRate, Money, Rate def test_rate(): assert Rate(100) 
== 100 assert Rate(\"100.50551\")", "Rate(50) + Money(50)) == '<stockholm.Money: \"150.00\">' assert Rate(Money(100)) == Rate(100) assert Rate(Money(100)).__class__ is", "\"1338.4711\", \"units\": 1338, \"nanos\": 471100000, } assert dict(Rate(\"0.123456\")) == { \"value\": \"0.123456\", \"units\":", "+ Rate(50)).__class__ is Rate assert str(Rate(50) + Rate(50)) == \"100.00\" assert repr(Rate(50) +", "0} assert str(Rate.from_dict(d)) == \"13384711.00\" assert str(Rate(d)) == \"13384711.00\" def test_rate_json(): rate =", "test_rate(): assert Rate(100) == 100 assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) == \"4711.1338\"", "== 100 assert (Rate(50) + Rate(50)).__class__ is Rate assert str(Rate(50) + Rate(50)) ==", "\"100.00\">' assert (Rate(50) + Rate(50) + Money(50)).__class__ is Money assert str(Rate(50) + Rate(50)", "def test_rate_from_dict(): d = {\"value\": \"13384711\", \"units\": 13384711, \"nanos\": 0} assert str(Rate.from_dict(d)) ==", "Rate(50) + Money(50)) == \"150.00\" assert repr(Rate(50) + Rate(50) + Money(50)) == '<stockholm.Money:", "Rate(1).sub_units def test_rate_hashable() -> None: m = stockholm.Rate(0) assert hash(m) def test_rate_asdict(): assert", "Rate.from_sub_units(100) with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable() ->", "== { \"value\": \"1338.4711\", \"units\": 1338, \"nanos\": 471100000, } assert dict(Rate(\"0.123456\")) == {", "== { \"value\": \"1338.00\", \"units\": 1338, \"nanos\": 0, } assert Rate(\"1338.4711\").as_dict() == {", "with pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with pytest.raises(ConversionError): Rate(100, from_sub_units=True) with", "Rate(1, currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with 
pytest.raises(ConversionError): Rate(100, from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100)", "pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable() -> None: m = stockholm.Rate(0) assert", "\"1338.00\", \"units\": 1338, \"nanos\": 0, } assert Rate(\"1338.4711\").as_dict() == { \"value\": \"1338.4711\", \"units\":", "} assert Rate(1338).keys() == [\"value\", \"units\", \"nanos\"] assert Rate(1338)[\"units\"] == 1338 assert Rate(1338)[\"value\"]", "assert str(Rate(d)) == \"13384711.00\" def test_rate_json(): rate = Rate(\"-999999999999999999.999999999\") json_string = json.dumps({\"rate\": rate.asdict()})", "is Rate def test_bad_rates(): with pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with", "(Rate(50) + Rate(50) + Money(50)).__class__ is Money assert str(Rate(50) + Rate(50) + Money(50))", "\"nanos\"] assert Rate(1338)[\"units\"] == 1338 assert Rate(1338)[\"value\"] == \"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def", "100 assert (Rate(50) + Rate(50)).__class__ is Rate assert str(Rate(50) + Rate(50)) == \"100.00\"", "+ Rate(50) + Money(50)) == '<stockholm.Money: \"150.00\">' assert Rate(Money(100)) == Rate(100) assert Rate(Money(100)).__class__", "Money(50)) == \"150.00\" assert repr(Rate(50) + Rate(50) + Money(50)) == '<stockholm.Money: \"150.00\">' assert", "Rate(Money(1, currency=\"SEK\")) with pytest.raises(ConversionError): Rate(100, from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100) with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with", "{ \"value\": \"0.10\", \"units\": 0, \"nanos\": 100000000, } assert Rate(1338).keys() == [\"value\", \"units\",", "import json import pytest import stockholm from stockholm import ConversionError, ExchangeRate, Money, Rate", "Rate(1338)[\"units\"] == 1338 
assert Rate(1338)[\"value\"] == \"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d", "assert Rate(50) < 51 assert Rate(50) > 49 assert Rate(50) > Rate(49) assert", "+ Rate(50)) == \"100.00\" assert repr(Rate(50) + Rate(50)) == '<stockholm.Rate: \"100.00\">' assert (Rate(50)", "with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with pytest.raises(ConversionError): Rate(100, from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100) with pytest.raises(ConversionError):", "\"units\": 1338, \"nanos\": 0, } assert Rate(\"1338.4711\").as_dict() == { \"value\": \"1338.4711\", \"units\": 1338,", "Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable() -> None: m =", "from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100) with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units", "= {\"value\": \"13384711\", \"units\": 13384711, \"nanos\": 0} assert str(Rate.from_dict(d)) == \"13384711.00\" assert str(Rate(d))", "assert hash(m) def test_rate_asdict(): assert Rate(1338).asdict() == { \"value\": \"1338.00\", \"units\": 1338, \"nanos\":", "\"nanos\": 0, } assert Rate(\"1338.4711\").as_dict() == { \"value\": \"1338.4711\", \"units\": 1338, \"nanos\": 471100000,", "is None assert Rate(100).currency_code is None assert Rate(100).amount == 100 assert Rate(100).value ==", "\"units\": 1338, \"nanos\": 471100000, } assert dict(Rate(\"0.123456\")) == { \"value\": \"0.123456\", \"units\": 0,", "assert Rate(1338).keys() == [\"value\", \"units\", \"nanos\"] assert Rate(1338)[\"units\"] == 1338 assert Rate(1338)[\"value\"] ==", "Rate(100) assert Rate(Money(100)).__class__ is Rate def test_bad_rates(): with 
pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with pytest.raises(ConversionError):", "Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d = {\"value\": \"13384711\", \"units\": 13384711, \"nanos\": 0} assert str(Rate.from_dict(d))", "m = stockholm.Rate(0) assert hash(m) def test_rate_asdict(): assert Rate(1338).asdict() == { \"value\": \"1338.00\",", "{ \"value\": \"1338.4711\", \"units\": 1338, \"nanos\": 471100000, } assert dict(Rate(\"0.123456\")) == { \"value\":", "== \"100.00\" assert repr(Rate(50) + Rate(50)) == '<stockholm.Rate: \"100.00\">' assert (Rate(50) + Rate(50)", "Rate(50) + Money(50)).__class__ is Money assert str(Rate(50) + Rate(50) + Money(50)) == \"150.00\"", "is None assert Rate(100).amount == 100 assert Rate(100).value == \"100.00\" assert Rate(50) <", "assert (Rate(50) + Rate(50) + Money(50)).__class__ is Money assert str(Rate(50) + Rate(50) +", "pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable() -> None: m", "1338 assert Rate(1338)[\"value\"] == \"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d = {\"value\":", "assert str(Rate(\"4711.1338\")) == \"4711.1338\" assert Rate(100).currency is None assert Rate(100).currency_code is None assert", "pytest.raises(ConversionError): Rate(100, from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100) with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units() with", "+ Money(50)) == '<stockholm.Money: \"150.00\">' assert Rate(Money(100)) == Rate(100) assert Rate(Money(100)).__class__ is Rate", "\"value\": \"1338.00\", \"units\": 1338, \"nanos\": 0, } assert Rate(\"1338.4711\").as_dict() == { \"value\": \"1338.4711\",", "assert dict(Rate(\"0.123456\")) == { \"value\": \"0.123456\", \"units\": 0, 
\"nanos\": 123456000, } assert dict(Rate(\"0.1\"))", "d = {\"value\": \"13384711\", \"units\": 13384711, \"nanos\": 0} assert str(Rate.from_dict(d)) == \"13384711.00\" assert", "= stockholm.Rate(0) assert hash(m) def test_rate_asdict(): assert Rate(1338).asdict() == { \"value\": \"1338.00\", \"units\":", "Rate(100).amount == 100 assert Rate(100).value == \"100.00\" assert Rate(50) < 51 assert Rate(50)", "pytest import stockholm from stockholm import ConversionError, ExchangeRate, Money, Rate def test_rate(): assert", "str(Rate(50) + Rate(50)) == \"100.00\" assert repr(Rate(50) + Rate(50)) == '<stockholm.Rate: \"100.00\">' assert", "\"0.10\", \"units\": 0, \"nanos\": 100000000, } assert Rate(1338).keys() == [\"value\", \"units\", \"nanos\"] assert", "def test_rate_hashable() -> None: m = stockholm.Rate(0) assert hash(m) def test_rate_asdict(): assert Rate(1338).asdict()", "dict(Rate(\"0.123456\")) == { \"value\": \"0.123456\", \"units\": 0, \"nanos\": 123456000, } assert dict(Rate(\"0.1\")) ==", "\"150.00\">' assert Rate(Money(100)) == Rate(100) assert Rate(Money(100)).__class__ is Rate def test_bad_rates(): with pytest.raises(ConversionError):", "test_bad_rates(): with pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with pytest.raises(ConversionError): Rate(100, from_sub_units=True)", "dict(Rate(\"0.1\")) == { \"value\": \"0.10\", \"units\": 0, \"nanos\": 100000000, } assert Rate(1338).keys() ==", "repr(Rate(50) + Rate(50)) == '<stockholm.Rate: \"100.00\">' assert (Rate(50) + Rate(50) + Money(50)).__class__ is", "== '<stockholm.Money: \"150.00\">' assert Rate(Money(100)) == Rate(100) assert Rate(Money(100)).__class__ is Rate def test_bad_rates():", "100 assert Rate(100).value == \"100.00\" assert Rate(50) < 51 assert Rate(50) > 49", "pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable() -> None: m = stockholm.Rate(0) assert hash(m) def test_rate_asdict():", 
"Rate(50) > 49 assert Rate(50) > Rate(49) assert Rate(50) + Rate(50) == 100", "+ Rate(50)) == '<stockholm.Rate: \"100.00\">' assert (Rate(50) + Rate(50) + Money(50)).__class__ is Money", "\"nanos\": 471100000, } assert dict(Rate(\"0.123456\")) == { \"value\": \"0.123456\", \"units\": 0, \"nanos\": 123456000,", "assert str(Rate(50) + Rate(50) + Money(50)) == \"150.00\" assert repr(Rate(50) + Rate(50) +", "stockholm from stockholm import ConversionError, ExchangeRate, Money, Rate def test_rate(): assert Rate(100) ==", "\"nanos\": 123456000, } assert dict(Rate(\"0.1\")) == { \"value\": \"0.10\", \"units\": 0, \"nanos\": 100000000,", "repr(Rate(50) + Rate(50) + Money(50)) == '<stockholm.Money: \"150.00\">' assert Rate(Money(100)) == Rate(100) assert", "assert Rate(50) + Rate(50) == 100 assert (Rate(50) + Rate(50)).__class__ is Rate assert", "Rate(50) + Rate(50) == 100 assert (Rate(50) + Rate(50)).__class__ is Rate assert str(Rate(50)", "assert repr(Rate(50) + Rate(50)) == '<stockholm.Rate: \"100.00\">' assert (Rate(50) + Rate(50) + Money(50)).__class__", "1338, \"nanos\": 0, } assert Rate(\"1338.4711\").as_dict() == { \"value\": \"1338.4711\", \"units\": 1338, \"nanos\":", "> Rate(49) assert Rate(50) + Rate(50) == 100 assert (Rate(50) + Rate(50)).__class__ is", "Rate(50)) == \"100.00\" assert repr(Rate(50) + Rate(50)) == '<stockholm.Rate: \"100.00\">' assert (Rate(50) +", "str(Rate.from_dict(d)) == \"13384711.00\" assert str(Rate(d)) == \"13384711.00\" def test_rate_json(): rate = Rate(\"-999999999999999999.999999999\") json_string", "== 100 assert Rate(100).value == \"100.00\" assert Rate(50) < 51 assert Rate(50) >", "51 assert Rate(50) > 49 assert Rate(50) > Rate(49) assert Rate(50) + Rate(50)", "is Rate assert str(Rate(50) + Rate(50)) == \"100.00\" assert repr(Rate(50) + Rate(50)) ==", "\"units\": 0, \"nanos\": 123456000, } assert dict(Rate(\"0.1\")) == { \"value\": \"0.10\", \"units\": 0,", "+ Money(50)).__class__ is Money assert str(Rate(50) + Rate(50) + 
Money(50)) == \"150.00\" assert", "\"nanos\": 100000000, } assert Rate(1338).keys() == [\"value\", \"units\", \"nanos\"] assert Rate(1338)[\"units\"] == 1338", "\"units\": 13384711, \"nanos\": 0} assert str(Rate.from_dict(d)) == \"13384711.00\" assert str(Rate(d)) == \"13384711.00\" def", "== 1338 assert Rate(1338)[\"value\"] == \"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d =", "Rate(100) == 100 assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) == \"4711.1338\" assert Rate(100).currency", "Money(50)) == '<stockholm.Money: \"150.00\">' assert Rate(Money(100)) == Rate(100) assert Rate(Money(100)).__class__ is Rate def", "} assert Rate(\"1338.4711\").as_dict() == { \"value\": \"1338.4711\", \"units\": 1338, \"nanos\": 471100000, } assert", "Money(50)).__class__ is Money assert str(Rate(50) + Rate(50) + Money(50)) == \"150.00\" assert repr(Rate(50)", "assert Rate(Money(100)).__class__ is Rate def test_bad_rates(): with pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1,", "== \"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d = {\"value\": \"13384711\", \"units\": 13384711,", "Rate(49) assert Rate(50) + Rate(50) == 100 assert (Rate(50) + Rate(50)).__class__ is Rate", "with pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable() -> None: m = stockholm.Rate(0) assert hash(m) def", "ExchangeRate, Money, Rate def test_rate(): assert Rate(100) == 100 assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\")", "assert str(Rate.from_dict(d)) == \"13384711.00\" assert str(Rate(d)) == \"13384711.00\" def test_rate_json(): rate = Rate(\"-999999999999999999.999999999\")", "== \"150.00\" assert repr(Rate(50) + Rate(50) + Money(50)) == '<stockholm.Money: \"150.00\">' assert Rate(Money(100))", "Rate(50)) == '<stockholm.Rate: \"100.00\">' assert (Rate(50) + 
Rate(50) + Money(50)).__class__ is Money assert", "test_rate_from_dict(): d = {\"value\": \"13384711\", \"units\": 13384711, \"nanos\": 0} assert str(Rate.from_dict(d)) == \"13384711.00\"", "str(Rate(d)) == \"13384711.00\" def test_rate_json(): rate = Rate(\"-999999999999999999.999999999\") json_string = json.dumps({\"rate\": rate.asdict()}) str(Rate(json.loads(json_string).get(\"rate\")))", "assert Rate(100) == 100 assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) == \"4711.1338\" assert", "Rate(50) == 100 assert (Rate(50) + Rate(50)).__class__ is Rate assert str(Rate(50) + Rate(50))", "== 100 assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) == \"4711.1338\" assert Rate(100).currency is", "Money assert str(Rate(50) + Rate(50) + Money(50)) == \"150.00\" assert repr(Rate(50) + Rate(50)", "with pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable() -> None: m = stockholm.Rate(0)", "100 assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) == \"4711.1338\" assert Rate(100).currency is None", "currency=\"SEK\")) with pytest.raises(ConversionError): Rate(100, from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100) with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError):", "import stockholm from stockholm import ConversionError, ExchangeRate, Money, Rate def test_rate(): assert Rate(100)", "assert Rate(50) > 49 assert Rate(50) > Rate(49) assert Rate(50) + Rate(50) ==", "assert Rate(50) > Rate(49) assert Rate(50) + Rate(50) == 100 assert (Rate(50) +", "from stockholm import ConversionError, ExchangeRate, Money, Rate def test_rate(): assert Rate(100) == 100", "ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) == \"4711.1338\" assert Rate(100).currency is None assert Rate(100).currency_code is 
None", "assert str(Rate(50) + Rate(50)) == \"100.00\" assert repr(Rate(50) + Rate(50)) == '<stockholm.Rate: \"100.00\">'", "(Rate(50) + Rate(50)).__class__ is Rate assert str(Rate(50) + Rate(50)) == \"100.00\" assert repr(Rate(50)", "None assert Rate(100).amount == 100 assert Rate(100).value == \"100.00\" assert Rate(50) < 51", "\"100.00\" assert repr(Rate(50) + Rate(50)) == '<stockholm.Rate: \"100.00\">' assert (Rate(50) + Rate(50) +", "> 49 assert Rate(50) > Rate(49) assert Rate(50) + Rate(50) == 100 assert", "pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with pytest.raises(ConversionError): Rate(100, from_sub_units=True) with pytest.raises(ConversionError):", "pytest.raises(ConversionError): Rate.from_sub_units(100) with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable()", "Rate(\"1338.4711\").as_dict() == { \"value\": \"1338.4711\", \"units\": 1338, \"nanos\": 471100000, } assert dict(Rate(\"0.123456\")) ==", "\"units\": 0, \"nanos\": 100000000, } assert Rate(1338).keys() == [\"value\", \"units\", \"nanos\"] assert Rate(1338)[\"units\"]", "Rate(1338).keys() == [\"value\", \"units\", \"nanos\"] assert Rate(1338)[\"units\"] == 1338 assert Rate(1338)[\"value\"] == \"1338.00\"", "Rate(100).currency is None assert Rate(100).currency_code is None assert Rate(100).amount == 100 assert Rate(100).value", "\"150.00\" assert repr(Rate(50) + Rate(50) + Money(50)) == '<stockholm.Money: \"150.00\">' assert Rate(Money(100)) ==", "{ \"value\": \"1338.00\", \"units\": 1338, \"nanos\": 0, } assert Rate(\"1338.4711\").as_dict() == { \"value\":", "None: m = stockholm.Rate(0) assert hash(m) def test_rate_asdict(): assert Rate(1338).asdict() == { \"value\":", "0, } assert Rate(\"1338.4711\").as_dict() == { \"value\": \"1338.4711\", \"units\": 1338, 
\"nanos\": 471100000, }", "\"13384711.00\" assert str(Rate(d)) == \"13384711.00\" def test_rate_json(): rate = Rate(\"-999999999999999999.999999999\") json_string = json.dumps({\"rate\":", "Rate assert str(Rate(50) + Rate(50)) == \"100.00\" assert repr(Rate(50) + Rate(50)) == '<stockholm.Rate:", "{\"value\": \"13384711\", \"units\": 13384711, \"nanos\": 0} assert str(Rate.from_dict(d)) == \"13384711.00\" assert str(Rate(d)) ==", "== \"100.00\" assert Rate(50) < 51 assert Rate(50) > 49 assert Rate(50) >", "assert Rate(100).currency_code is None assert Rate(100).amount == 100 assert Rate(100).value == \"100.00\" assert", "currency=\"EUR\") with pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with pytest.raises(ConversionError): Rate(100, from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100) with", "assert dict(Rate(\"0.1\")) == { \"value\": \"0.10\", \"units\": 0, \"nanos\": 100000000, } assert Rate(1338).keys()", "+ Rate(50) == 100 assert (Rate(50) + Rate(50)).__class__ is Rate assert str(Rate(50) +", "< 51 assert Rate(50) > 49 assert Rate(50) > Rate(49) assert Rate(50) +", "== '<stockholm.Rate: \"100.00\">' assert (Rate(50) + Rate(50) + Money(50)).__class__ is Money assert str(Rate(50)", "+ Money(50)) == \"150.00\" assert repr(Rate(50) + Rate(50) + Money(50)) == '<stockholm.Money: \"150.00\">'", "+ Rate(50) + Money(50)) == \"150.00\" assert repr(Rate(50) + Rate(50) + Money(50)) ==", "with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable() -> None:", "Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units def test_rate_hashable() -> None: m = stockholm.Rate(0) assert hash(m)", "{ \"value\": \"0.123456\", \"units\": 0, \"nanos\": 123456000, } assert dict(Rate(\"0.1\")) == { \"value\":", "str(Rate(\"4711.1338\")) == \"4711.1338\" assert 
Rate(100).currency is None assert Rate(100).currency_code is None assert Rate(100).amount", "Rate(50)).__class__ is Rate assert str(Rate(50) + Rate(50)) == \"100.00\" assert repr(Rate(50) + Rate(50))", "with pytest.raises(ConversionError): Rate.from_sub_units(100) with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units() with pytest.raises(ConversionError): Rate(1).sub_units def", "== \"4711.1338\" assert Rate(100).currency is None assert Rate(100).currency_code is None assert Rate(100).amount ==", "assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) == \"4711.1338\" assert Rate(100).currency is None assert", "== { \"value\": \"0.10\", \"units\": 0, \"nanos\": 100000000, } assert Rate(1338).keys() == [\"value\",", "13384711, \"nanos\": 0} assert str(Rate.from_dict(d)) == \"13384711.00\" assert str(Rate(d)) == \"13384711.00\" def test_rate_json():", "\"4711.1338\" assert Rate(100).currency is None assert Rate(100).currency_code is None assert Rate(100).amount == 100", "assert Rate(1338).asdict() == { \"value\": \"1338.00\", \"units\": 1338, \"nanos\": 0, } assert Rate(\"1338.4711\").as_dict()", "def test_rate(): assert Rate(100) == 100 assert Rate(\"100.50551\") == ExchangeRate(\"100.50551\") assert str(Rate(\"4711.1338\")) ==", "assert repr(Rate(50) + Rate(50) + Money(50)) == '<stockholm.Money: \"150.00\">' assert Rate(Money(100)) == Rate(100)", "-> None: m = stockholm.Rate(0) assert hash(m) def test_rate_asdict(): assert Rate(1338).asdict() == {", "Rate(1338).asdict() == { \"value\": \"1338.00\", \"units\": 1338, \"nanos\": 0, } assert Rate(\"1338.4711\").as_dict() ==", "Rate(100).currency_code is None assert Rate(100).amount == 100 assert Rate(100).value == \"100.00\" assert Rate(50)", "\"value\": \"1338.4711\", \"units\": 1338, \"nanos\": 471100000, } assert dict(Rate(\"0.123456\")) == { \"value\": \"0.123456\",", "stockholm import ConversionError, 
ExchangeRate, Money, Rate def test_rate(): assert Rate(100) == 100 assert", "1338, \"nanos\": 471100000, } assert dict(Rate(\"0.123456\")) == { \"value\": \"0.123456\", \"units\": 0, \"nanos\":", "== \"13384711.00\" assert str(Rate(d)) == \"13384711.00\" def test_rate_json(): rate = Rate(\"-999999999999999999.999999999\") json_string =", "json import pytest import stockholm from stockholm import ConversionError, ExchangeRate, Money, Rate def", "} assert dict(Rate(\"0.1\")) == { \"value\": \"0.10\", \"units\": 0, \"nanos\": 100000000, } assert", "\"13384711\", \"units\": 13384711, \"nanos\": 0} assert str(Rate.from_dict(d)) == \"13384711.00\" assert str(Rate(d)) == \"13384711.00\"", "== Rate(100) assert Rate(Money(100)).__class__ is Rate def test_bad_rates(): with pytest.raises(ConversionError): Rate(1, currency=\"EUR\") with", "def test_rate_asdict(): assert Rate(1338).asdict() == { \"value\": \"1338.00\", \"units\": 1338, \"nanos\": 0, }", "assert Rate(\"1338.4711\").as_dict() == { \"value\": \"1338.4711\", \"units\": 1338, \"nanos\": 471100000, } assert dict(Rate(\"0.123456\"))", "assert Rate(1338)[\"value\"] == \"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d = {\"value\": \"13384711\",", "import pytest import stockholm from stockholm import ConversionError, ExchangeRate, Money, Rate def test_rate():", "Rate(100).value == \"100.00\" assert Rate(50) < 51 assert Rate(50) > 49 assert Rate(50)", "} assert dict(Rate(\"0.123456\")) == { \"value\": \"0.123456\", \"units\": 0, \"nanos\": 123456000, } assert", "\"units\", \"nanos\"] assert Rate(1338)[\"units\"] == 1338 assert Rate(1338)[\"value\"] == \"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"]", "\"value\": \"0.123456\", \"units\": 0, \"nanos\": 123456000, } assert dict(Rate(\"0.1\")) == { \"value\": \"0.10\",", "with pytest.raises(ConversionError): Rate(100, from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100) 
with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\") with pytest.raises(ConversionError): Rate(1).to_sub_units()", "\"13384711.00\" def test_rate_json(): rate = Rate(\"-999999999999999999.999999999\") json_string = json.dumps({\"rate\": rate.asdict()}) str(Rate(json.loads(json_string).get(\"rate\"))) == \"-999999999999999999.999999999\"", "Rate(1338)[\"value\"] == \"1338.00\" with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d = {\"value\": \"13384711\", \"units\":", "pytest.raises(ConversionError): Rate(Money(1, currency=\"SEK\")) with pytest.raises(ConversionError): Rate(100, from_sub_units=True) with pytest.raises(ConversionError): Rate.from_sub_units(100) with pytest.raises(ConversionError): Rate(1).to_currency(\"SEK\")", "with pytest.raises(KeyError): Rate(1338)[\"does_not_exist\"] def test_rate_from_dict(): d = {\"value\": \"13384711\", \"units\": 13384711, \"nanos\": 0}" ]
[ "\"minimal\" def _make_layout(self): \"\"\"Here is where the ingredients to bake a great plugin", "webview template go \"\"\" pass def _event_loop(self, event): \"\"\"Event getter before every _start", "start, *args): super().__init__(start, *args) def _plugin_info(self): \"\"\"Required informations about the plugin \"\"\" self.version", "import PluginBase class Plugin(PluginBase): def __init__(self, start, *args): super().__init__(start, *args) def _plugin_info(self): \"\"\"Required", "\"\"\" pass def _start(self): \"\"\"Main loop of the plugin this includes a refresh", "= \"minimal\" def _make_layout(self): \"\"\"Here is where the ingredients to bake a great", "def __init__(self, start, *args): super().__init__(start, *args) def _plugin_info(self): \"\"\"Required informations about the plugin", "<reponame>tomsimonart/LMPM from ..libs.pluginbase import PluginBase class Plugin(PluginBase): def __init__(self, start, *args): super().__init__(start, *args)", "self.data_dir = \"minimal\" def _make_layout(self): \"\"\"Here is where the ingredients to bake a", "_make_layout(self): \"\"\"Here is where the ingredients to bake a great plugin and webview", "\"\"\" self.version = \"0.11.0\" self.data_dir = \"minimal\" def _make_layout(self): \"\"\"Here is where the", "to bake a great plugin and webview template go \"\"\" pass def _event_loop(self,", "bake a great plugin and webview template go \"\"\" pass def _event_loop(self, event):", "and webview template go \"\"\" pass def _event_loop(self, event): \"\"\"Event getter before every", "\"\"\"Event getter before every _start cycle \"\"\" pass def _start(self): \"\"\"Main loop of", "informations about the plugin \"\"\" self.version = \"0.11.0\" self.data_dir = \"minimal\" def _make_layout(self):", "getter before every _start cycle \"\"\" pass def _start(self): \"\"\"Main loop of the", "def _start(self): \"\"\"Main loop of the plugin this includes a refresh of self.screen", "_plugin_info(self): \"\"\"Required informations 
about the plugin \"\"\" self.version = \"0.11.0\" self.data_dir = \"minimal\"", "= \"0.11.0\" self.data_dir = \"minimal\" def _make_layout(self): \"\"\"Here is where the ingredients to", "def _make_layout(self): \"\"\"Here is where the ingredients to bake a great plugin and", "event): \"\"\"Event getter before every _start cycle \"\"\" pass def _start(self): \"\"\"Main loop", "pass def _event_loop(self, event): \"\"\"Event getter before every _start cycle \"\"\" pass def", "the plugin \"\"\" self.version = \"0.11.0\" self.data_dir = \"minimal\" def _make_layout(self): \"\"\"Here is", "every _start cycle \"\"\" pass def _start(self): \"\"\"Main loop of the plugin this", "from ..libs.pluginbase import PluginBase class Plugin(PluginBase): def __init__(self, start, *args): super().__init__(start, *args) def", "ingredients to bake a great plugin and webview template go \"\"\" pass def", "def _plugin_info(self): \"\"\"Required informations about the plugin \"\"\" self.version = \"0.11.0\" self.data_dir =", "\"\"\"Main loop of the plugin this includes a refresh of self.screen \"\"\" pass", "plugin and webview template go \"\"\" pass def _event_loop(self, event): \"\"\"Event getter before", "Plugin(PluginBase): def __init__(self, start, *args): super().__init__(start, *args) def _plugin_info(self): \"\"\"Required informations about the", "\"\"\"Required informations about the plugin \"\"\" self.version = \"0.11.0\" self.data_dir = \"minimal\" def", "plugin \"\"\" self.version = \"0.11.0\" self.data_dir = \"minimal\" def _make_layout(self): \"\"\"Here is where", "cycle \"\"\" pass def _start(self): \"\"\"Main loop of the plugin this includes a", "*args): super().__init__(start, *args) def _plugin_info(self): \"\"\"Required informations about the plugin \"\"\" self.version =", "\"0.11.0\" self.data_dir = \"minimal\" def _make_layout(self): \"\"\"Here is where the ingredients to bake", "template go \"\"\" pass def _event_loop(self, event): \"\"\"Event getter before every 
_start cycle", "_start(self): \"\"\"Main loop of the plugin this includes a refresh of self.screen \"\"\"", "before every _start cycle \"\"\" pass def _start(self): \"\"\"Main loop of the plugin", "_event_loop(self, event): \"\"\"Event getter before every _start cycle \"\"\" pass def _start(self): \"\"\"Main", "the ingredients to bake a great plugin and webview template go \"\"\" pass", "class Plugin(PluginBase): def __init__(self, start, *args): super().__init__(start, *args) def _plugin_info(self): \"\"\"Required informations about", "is where the ingredients to bake a great plugin and webview template go", "about the plugin \"\"\" self.version = \"0.11.0\" self.data_dir = \"minimal\" def _make_layout(self): \"\"\"Here", "*args) def _plugin_info(self): \"\"\"Required informations about the plugin \"\"\" self.version = \"0.11.0\" self.data_dir", "..libs.pluginbase import PluginBase class Plugin(PluginBase): def __init__(self, start, *args): super().__init__(start, *args) def _plugin_info(self):", "self.version = \"0.11.0\" self.data_dir = \"minimal\" def _make_layout(self): \"\"\"Here is where the ingredients", "great plugin and webview template go \"\"\" pass def _event_loop(self, event): \"\"\"Event getter", "_start cycle \"\"\" pass def _start(self): \"\"\"Main loop of the plugin this includes", "a great plugin and webview template go \"\"\" pass def _event_loop(self, event): \"\"\"Event", "__init__(self, start, *args): super().__init__(start, *args) def _plugin_info(self): \"\"\"Required informations about the plugin \"\"\"", "\"\"\" pass def _event_loop(self, event): \"\"\"Event getter before every _start cycle \"\"\" pass", "def _event_loop(self, event): \"\"\"Event getter before every _start cycle \"\"\" pass def _start(self):", "pass def _start(self): \"\"\"Main loop of the plugin this includes a refresh of", "where the ingredients to bake a great plugin and webview template go \"\"\"", "go \"\"\" pass def _event_loop(self, event): \"\"\"Event getter 
before every _start cycle \"\"\"", "PluginBase class Plugin(PluginBase): def __init__(self, start, *args): super().__init__(start, *args) def _plugin_info(self): \"\"\"Required informations", "super().__init__(start, *args) def _plugin_info(self): \"\"\"Required informations about the plugin \"\"\" self.version = \"0.11.0\"", "\"\"\"Here is where the ingredients to bake a great plugin and webview template" ]
[ "if chrom != gene_chrom: _LOGGER.warning('%s exon ignored (wrong chromosome: ' '%s instead of", "#gene_names = get_readable_gene_identifiers(gene_table) # series with index = Ensembl ID, value = unique", "dtype={0: str}, sep='\\t', comment='#', header=None, chunksize=chunksize)): # select only exon entries df_sel =", "os.chmod(path, st.st_mode | stat.S_IEXEC) def zcat_subproc(path): \"\"\"Creates a subprocess for decompressing a gzip", "#genes = genes.loc[sorted_gene_ids] gene_table = gene_table.loc[sorted_gene_ids] # dictionary for holding list of intervals", "is_empty_dir(dir_): \"\"\"Tests whether a directory is empty. Note: Also returns True if the", "+ nuc + seq[(pos+1):] yield mm def get_reverse_complement(seq): \"\"\"Returns the reverse complement of", "protein-coding genes.) TODO: docstring\"\"\" # get gene names that are guaranteed to be", "key=lambda id_: [gene_table.loc[id_, 'chromosome'], gene_table.loc[id_, 'position'] < 0, abs(gene_table.loc[id_, 'position'])]) #genes = genes.loc[sorted_gene_ids]", "-c \"%s\"' % path, shell=True, stdout=subprocess.PIPE) return subproc def get_all_kmers(k, kmer='', kmer_list=None): \"\"\"Returns", "empty. Note: Also returns True if the directory doesn't exist. TODO: docstring \"\"\"", "'T': 'A', 'G': 'C', 'C': 'G' } compseq = ''.join([rc[nuc] for nuc in", "as pd from genometools.expression import ExpGeneTable from genometools import gtf import singlecell _LOGGER", "valid: mismatch.append(''.join(mut)) return sorted(mismatch) def concatenate_files(input_files, output_file, append=False): write_mode = 'wb' if append:", "a nucleotide sequence. TODO: docstring\"\"\" rc = { 'A': 'T', 'T': 'A', 'G':", "for nuc in seq[::-1]]) return compseq def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse GTF file", "genes\" are defined here as all genes on the mitochondrial chromosome. TODO: docstring", "list of all ribosomal genes for a given species. 
\"Ribosomal genes\" are defined", "bases=None): \"\"\"Return all nucleotide sequences with a given hamming distance.\"\"\" if num_edits >", "path = 'package_data/plotly.min.js' return resource_string('plotly', path).decode('utf-8') def is_empty_dir(dir_): \"\"\"Tests whether a directory is", "A/C/G/T alphabet). TODO: docstring\"\"\" if kmer_list is None: kmer_list = [] if len(kmer)", "counts = gene_table['name'].value_counts() gene_counts = counts.loc[gene_table['name']] gene_ids = gene_table.index.tolist() gene_ids = [name if", "docstring\"\"\" rc = { 'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G' }", "= gene_table.loc[sorted_gene_ids] # dictionary for holding list of intervals for each gene gene_exons", "iv[0] <= cur[1]: if iv[1] > cur[1]: # interval ends after current interval", "subproc def get_all_kmers(k, kmer='', kmer_list=None): \"\"\"Returns all possible k-mer sequences (for A/C/G/T alphabet).", "False break mut[pos] = nt if valid: mismatch.append(''.join(mut)) return sorted(mismatch) def concatenate_files(input_files, output_file,", "id_: [gene_table.loc[id_, 'chromosome'], gene_table.loc[id_, 'position'] < 0, abs(gene_table.loc[id_, 'position'])]) #genes = genes.loc[sorted_gene_ids] gene_table", "kmer_list) if not kmer: return kmer_list def get_mismatch_sequences(seq): \"\"\"Generates all nucleotide sequences with", "def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique gene identifiers that primarily use the genes' names.\"\"\"", "file.\"\"\" st = os.stat(path) os.chmod(path, st.st_mode | stat.S_IEXEC) def zcat_subproc(path): \"\"\"Creates a subprocess", "'G', 'T']: if nuc != seq[pos]: mm = seq[:pos] + nuc + seq[(pos+1):]", "counts.loc[gene_table['name']] gene_ids = gene_table.index.tolist() gene_ids = [name if c == 1 else '%s_%s'", "than the length ' 'of the sequence (%d nt).' 
% (num_edits, len(seq))) if", "all protein-coding genes whose protein products are a structural component of the small", "try: _, dirnames, filenames = next(os.walk(dir_)) if dirnames or filenames: is_empty = False", "mut[pos] = nt if valid: mismatch.append(''.join(mut)) return sorted(mismatch) def concatenate_files(input_files, output_file, append=False): write_mode", "for holding list of intervals for each gene gene_exons = OrderedDict([id_, []] for", "[] if len(kmer) == k: kmer_list.append(kmer) else: for nuc in ['A', 'C', 'G',", "collections import OrderedDict from pkg_resources import resource_string import pandas as pd from genometools.expression", "gene_chrom = gene_table.loc[id_, 'chromosome'] if chrom != gene_chrom: _LOGGER.warning('%s exon ignored (wrong chromosome:", "import pandas as pd from genometools.expression import ExpGeneTable from genometools import gtf import", "[nt for nt in seq] mismatch = [] for comb in itertools.combinations(range(length), num_edits):", "TODO: docstring\"\"\" subproc = subprocess.Popen('gunzip -c \"%s\"' % path, shell=True, stdout=subprocess.PIPE) return subproc", "= [name if c == 1 else '%s_%s' % (name, gene_ids[i]) for i,", "write_mode = 'ab' with open(output_file, write_mode) as ofh: for f in input_files: with", "df_sel.iloc[:, 3], df_sel.iloc[:, 4]): total += 1 try: gene = gene_table.loc[id_] except KeyError:", "as all protein-coding genes whose protein products are a structural component of the", "#genes = pd.Series(index=gene_table.index, data=gene_names) # sort genes by chromosome, strand, and then position", "\"\"\"Merge overlapping intervals. 
TODO: docstring\"\"\" if not intervals: return [] # sort intervals", "(name, gene_ids[i]) for i, (name, c) in enumerate(gene_counts.items())] return gene_ids def get_edit_sequences(seq, num_edits,", "return compseq def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse GTF file and get a dictionary", "concatenate_files(input_files, output_file, append=False): write_mode = 'wb' if append: write_mode = 'ab' with open(output_file,", "in itertools.combinations(range(length), num_edits): for subs in itertools.product(*all_bases): mut = seq_list[:] valid = True", "exon intervals. (Only for protein-coding genes.) TODO: docstring\"\"\" # get gene names that", "import resource_string import pandas as pd from genometools.expression import ExpGeneTable from genometools import", "to be unique #gene_names = get_readable_gene_identifiers(gene_table) # series with index = Ensembl ID,", "series with index = Ensembl ID, value = unique gene name #genes =", "OrderedDict from pkg_resources import resource_string import pandas as pd from genometools.expression import ExpGeneTable", "chunksize=chunksize)): # select only exon entries df_sel = df.loc[df.iloc[:, 2] == 'exon'] #", "pandas as pd from genometools.expression import ExpGeneTable from genometools import gtf import singlecell", "occurrences for each of gene name counts = gene_table['name'].value_counts() gene_counts = counts.loc[gene_table['name']] gene_ids", "= os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv' % species) with open(path) as fh: return fh.read().split('\\n')", "stdout=subprocess.PIPE) return subproc def get_all_kmers(k, kmer='', kmer_list=None): \"\"\"Returns all possible k-mer sequences (for", "docstring \"\"\" # resource_string? path = 'package_data/plotly.min.js' return resource_string('plotly', path).decode('utf-8') def is_empty_dir(dir_): \"\"\"Tests", "ribosomal genes for a given species. 
\"Ribosomal genes\" are defined here as all", "nt in seq] mismatch = [] for comb in itertools.combinations(range(length), num_edits): for subs", "\"\"\"Parse GTF file and get a dictionary of gene=>list of exon intervals. (Only", "exon entries df_sel = df.loc[df.iloc[:, 2] == 'exon'] # extract gene IDs gene_ids", "import itertools from collections import OrderedDict from pkg_resources import resource_string import pandas as", "genome_annotation_file, chunksize=10000): \"\"\"Parse GTF file and get a dictionary of gene=>list of exon", "[gene_table.loc[id_, 'chromosome'], gene_table.loc[id_, 'position'] < 0, abs(gene_table.loc[id_, 'position'])]) #genes = genes.loc[sorted_gene_ids] gene_table =", "functions.\"\"\" import subprocess import logging import os import shutil import stat import itertools", "if iv[0] <= cur[1]: if iv[1] > cur[1]: # interval ends after current", "(%.1f %%).', valid, total, 100*(valid/float(total))) return gene_exons def merge_intervals(intervals): \"\"\"Merge overlapping intervals. TODO:", "code. TODO: docstring \"\"\" # resource_string? 
path = 'package_data/plotly.min.js' return resource_string('plotly', path).decode('utf-8') def", "pd.Series(index=gene_table.index, data=gene_names) # sort genes by chromosome, strand, and then position sorted_gene_ids =", "(name, c) in enumerate(gene_counts.items())] return gene_ids def get_edit_sequences(seq, num_edits, bases=None): \"\"\"Return all nucleotide", "_LOGGER = logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique gene identifiers that primarily use", "distance.\"\"\" if num_edits > len(seq): raise ValueError('Asked to make make more edits (%d)", "is None: bases = set('ACGT') length = len(seq) all_bases = [bases for i", "end in zip( gene_ids, df_sel.iloc[:, 0], df_sel.iloc[:, 3], df_sel.iloc[:, 4]): total += 1", "start position intervals = sorted(intervals, key=lambda x:x[0]) merged = [] cur = list(intervals[0])", "after current interval if iv[0] <= cur[1]: if iv[1] > cur[1]: # interval", "[name if c == 1 else '%s_%s' % (name, gene_ids[i]) for i, (name,", "species. 
\"Ribosomal genes\" are defined here as all protein-coding genes whose protein products", "the user executable flag for a file.\"\"\" st = os.stat(path) os.chmod(path, st.st_mode |", "'%s_%s' % (name, gene_ids[i]) for i, (name, c) in enumerate(gene_counts.items())] return gene_ids def", "+ nuc get_all_kmers(k, var, kmer_list) if not kmer: return kmer_list def get_mismatch_sequences(seq): \"\"\"Generates", "not kmer: return kmer_list def get_mismatch_sequences(seq): \"\"\"Generates all nucleotide sequences with hamming distance", "iv in intervals[1:]: # interval starts inside/right after current interval if iv[0] <=", "str}, sep='\\t', comment='#', header=None, chunksize=chunksize)): # select only exon entries df_sel = df.loc[df.iloc[:,", "is None: kmer_list = [] if len(kmer) == k: kmer_list.append(kmer) else: for nuc", "= gene_table.index.tolist() gene_ids = [name if c == 1 else '%s_%s' % (name,", "ExpGeneTable): \"\"\"Return unique gene identifiers that primarily use the genes' names.\"\"\" # count", "chromosome: ' '%s instead of %s).', id_, chrom, gene_chrom) else: valid += 1", "= os.path.join(singlecell._root, 'data', 'gene_lists', 'mitochondrial_%s.tsv' % species) with open(path) as fh: return fh.read().split('\\n')", "= True for pos, nt in zip(comb, subs): if mut[pos] == nt: valid", "get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse GTF file and get a dictionary of gene=>list of", "path = os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv' % species) with open(path) as fh: return", "sep='\\t', comment='#', header=None, chunksize=chunksize)): # select only exon entries df_sel = df.loc[df.iloc[:, 2]", "df_sel.iloc[:, 0], df_sel.iloc[:, 3], df_sel.iloc[:, 4]): total += 1 try: gene = gene_table.loc[id_]", "defined here as all genes on the mitochondrial chromosome. TODO: docstring \"\"\" path", "kmer='', kmer_list=None): \"\"\"Returns all possible k-mer sequences (for A/C/G/T alphabet). 
TODO: docstring\"\"\" if", "in range(num_edits)] seq_list = [nt for nt in seq] mismatch = [] for", "or large ribosomal subunit (including fusion genes). TODO: docstring \"\"\" path = os.path.join(singlecell._root,", "# extract gene IDs gene_ids = df_sel.iloc[:, 8].apply( lambda x: gtf.parse_attributes(x)['gene_id']) for id_,", "kmer_list def get_mismatch_sequences(seq): \"\"\"Generates all nucleotide sequences with hamming distance 1 to `seq`.", "Also returns True if the directory doesn't exist. TODO: docstring \"\"\" is_empty =", "nucleotide sequences with a given hamming distance.\"\"\" if num_edits > len(seq): raise ValueError('Asked", "as ifh: shutil.copyfileobj(ifh, ofh, 16*1024*1024) def make_file_executable(path): \"\"\"Sets the user executable flag for", "for pos, nt in zip(comb, subs): if mut[pos] == nt: valid = False", "sort genes by chromosome, strand, and then position sorted_gene_ids = sorted( [id_ for", "None: kmer_list = [] if len(kmer) == k: kmer_list.append(kmer) else: for nuc in", "genes.loc[sorted_gene_ids] gene_table = gene_table.loc[sorted_gene_ids] # dictionary for holding list of intervals for each", "intervals[1:]: # interval starts inside/right after current interval if iv[0] <= cur[1]: if", "write_mode = 'wb' if append: write_mode = 'ab' with open(output_file, write_mode) as ofh:", "TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'mitochondrial_%s.tsv' % species) with open(path)", "if mut[pos] == nt: valid = False break mut[pos] = nt if valid:", "return fh.read().split('\\n') def get_ribosomal_genes(species='human'): \"\"\"Get a list of all ribosomal genes for a", "this gene is not contained in the gene table continue gene_chrom = gene_table.loc[id_,", "mm def get_reverse_complement(seq): \"\"\"Returns the reverse complement of a nucleotide sequence. TODO: docstring\"\"\"", "yield mm def get_reverse_complement(seq): \"\"\"Returns the reverse complement of a nucleotide sequence. 
TODO:", "append=False): write_mode = 'wb' if append: write_mode = 'ab' with open(output_file, write_mode) as", "% (name, gene_ids[i]) for i, (name, c) in enumerate(gene_counts.items())] return gene_ids def get_edit_sequences(seq,", "ExpGeneTable from genometools import gtf import singlecell _LOGGER = logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable):", "be unique #gene_names = get_readable_gene_identifiers(gene_table) # series with index = Ensembl ID, value", "ends after current interval cur[1] = iv[1] else: merged.append(cur) cur = list(iv) merged.append(cur)", "'G', 'T']: var = kmer + nuc get_all_kmers(k, var, kmer_list) if not kmer:", "open(path) as fh: return fh.read().split('\\n') def get_plotly_js(): \"\"\"Return the plotly javascript code. TODO:", "lambda x: gtf.parse_attributes(x)['gene_id']) for id_, chrom, start, end in zip( gene_ids, df_sel.iloc[:, 0],", "gene = gene_table.loc[id_] except KeyError: # this gene is not contained in the", "continue gene_chrom = gene_table.loc[id_, 'chromosome'] if chrom != gene_chrom: _LOGGER.warning('%s exon ignored (wrong", "for i in range(num_edits)] seq_list = [nt for nt in seq] mismatch =", "sorted(intervals, key=lambda x:x[0]) merged = [] cur = list(intervals[0]) for iv in intervals[1:]:", "= True try: _, dirnames, filenames = next(os.walk(dir_)) if dirnames or filenames: is_empty", "import shutil import stat import itertools from collections import OrderedDict from pkg_resources import", "mitochondrial genes for a given species. 
\"Mitochondrial genes\" are defined here as all", "nuc get_all_kmers(k, var, kmer_list) if not kmer: return kmer_list def get_mismatch_sequences(seq): \"\"\"Generates all", "holding list of intervals for each gene gene_exons = OrderedDict([id_, []] for id_", "/ %d exons from valid genes (%.1f %%).', valid, total, 100*(valid/float(total))) return gene_exons", "name counts = gene_table['name'].value_counts() gene_counts = counts.loc[gene_table['name']] gene_ids = gene_table.index.tolist() gene_ids = [name", "chromosome. TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'mitochondrial_%s.tsv' % species) with", "mut[pos] == nt: valid = False break mut[pos] = nt if valid: mismatch.append(''.join(mut))", "[id_ for id_ in gene_table.index], key=lambda id_: [gene_table.loc[id_, 'chromosome'], gene_table.loc[id_, 'position'] < 0,", "\"Ribosomal genes\" are defined here as all protein-coding genes whose protein products are", "chrom != gene_chrom: _LOGGER.warning('%s exon ignored (wrong chromosome: ' '%s instead of %s).',", "= sorted(intervals, key=lambda x:x[0]) merged = [] cur = list(intervals[0]) for iv in", "# interval starts inside/right after current interval if iv[0] <= cur[1]: if iv[1]", "start, end in zip( gene_ids, df_sel.iloc[:, 0], df_sel.iloc[:, 3], df_sel.iloc[:, 4]): total +=", "for nt in seq] mismatch = [] for comb in itertools.combinations(range(length), num_edits): for", "k: kmer_list.append(kmer) else: for nuc in ['A', 'C', 'G', 'T']: var = kmer", "from genometools.expression import ExpGeneTable from genometools import gtf import singlecell _LOGGER = logging.getLogger(__name__)", "num_edits): for subs in itertools.product(*all_bases): mut = seq_list[:] valid = True for pos,", "for a file.\"\"\" st = os.stat(path) os.chmod(path, st.st_mode | stat.S_IEXEC) def zcat_subproc(path): \"\"\"Creates", "pos in range(len(seq)): for nuc in ['A', 'C', 'G', 'T']: if nuc !=", "def get_all_kmers(k, kmer='', kmer_list=None): \"\"\"Returns 
all possible k-mer sequences (for A/C/G/T alphabet). TODO:", "= iv[1] else: merged.append(cur) cur = list(iv) merged.append(cur) return merged def get_mitochondrial_genes(species='human'): \"\"\"Get", "and get a dictionary of gene=>list of exon intervals. (Only for protein-coding genes.)", "'C', 'C': 'G' } compseq = ''.join([rc[nuc] for nuc in seq[::-1]]) return compseq", "import OrderedDict from pkg_resources import resource_string import pandas as pd from genometools.expression import", "doesn't exist. TODO: docstring \"\"\" is_empty = True try: _, dirnames, filenames =", "= set('ACGT') length = len(seq) all_bases = [bases for i in range(num_edits)] seq_list", "(%d) than the length ' 'of the sequence (%d nt).' % (num_edits, len(seq)))", "'data', 'gene_lists', 'ribosomal_%s.tsv' % species) with open(path) as fh: return fh.read().split('\\n') def get_plotly_js():", "gene_ids[i]) for i, (name, c) in enumerate(gene_counts.items())] return gene_ids def get_edit_sequences(seq, num_edits, bases=None):", "valid = True for pos, nt in zip(comb, subs): if mut[pos] == nt:", "get_mitochondrial_genes(species='human'): \"\"\"Get a list of all mitochondrial genes for a given species. \"Mitochondrial", "the length ' 'of the sequence (%d nt).' % (num_edits, len(seq))) if bases", "a dictionary of gene=>list of exon intervals. (Only for protein-coding genes.) TODO: docstring\"\"\"", "genome_annotation_file, dtype={0: str}, sep='\\t', comment='#', header=None, chunksize=chunksize)): # select only exon entries df_sel", "range(num_edits)] seq_list = [nt for nt in seq] mismatch = [] for comb", "\"\"\"Tests whether a directory is empty. Note: Also returns True if the directory", "# resource_string? 
path = 'package_data/plotly.min.js' return resource_string('plotly', path).decode('utf-8') def is_empty_dir(dir_): \"\"\"Tests whether a", "for f in input_files: with open(f, 'rb') as ifh: shutil.copyfileobj(ifh, ofh, 16*1024*1024) def", "list(iv) merged.append(cur) return merged def get_mitochondrial_genes(species='human'): \"\"\"Get a list of all mitochondrial genes", "genes' names.\"\"\" # count occurrences for each of gene name counts = gene_table['name'].value_counts()", "'T', 'T': 'A', 'G': 'C', 'C': 'G' } compseq = ''.join([rc[nuc] for nuc", "of gene=>list of exon intervals. (Only for protein-coding genes.) TODO: docstring\"\"\" # get", "the mitochondrial chromosome. TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'mitochondrial_%s.tsv' %", "intervals: return [] # sort intervals by start position intervals = sorted(intervals, key=lambda", "cur = list(intervals[0]) for iv in intervals[1:]: # interval starts inside/right after current", "if kmer_list is None: kmer_list = [] if len(kmer) == k: kmer_list.append(kmer) else:", "unique gene identifiers that primarily use the genes' names.\"\"\" # count occurrences for", "'position'])]) #genes = genes.loc[sorted_gene_ids] gene_table = gene_table.loc[sorted_gene_ids] # dictionary for holding list of", "= ''.join([rc[nuc] for nuc in seq[::-1]]) return compseq def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse", "def get_mitochondrial_genes(species='human'): \"\"\"Get a list of all mitochondrial genes for a given species.", "path).decode('utf-8') def is_empty_dir(dir_): \"\"\"Tests whether a directory is empty. Note: Also returns True", "\"\"\"Generates all nucleotide sequences with hamming distance 1 to `seq`. TODO: docstring\"\"\" for", "fusion genes). 
TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv' % species)", "each of gene name counts = gene_table['name'].value_counts() gene_counts = counts.loc[gene_table['name']] gene_ids = gene_table.index.tolist()", "sequence. TODO: docstring\"\"\" rc = { 'A': 'T', 'T': 'A', 'G': 'C', 'C':", "else '%s_%s' % (name, gene_ids[i]) for i, (name, c) in enumerate(gene_counts.items())] return gene_ids", "genes for a given species. \"Ribosomal genes\" are defined here as all protein-coding", "directory doesn't exist. TODO: docstring \"\"\" is_empty = True try: _, dirnames, filenames", "output_file, append=False): write_mode = 'wb' if append: write_mode = 'ab' with open(output_file, write_mode)", "a subprocess for decompressing a gzip file. TODO: docstring\"\"\" subproc = subprocess.Popen('gunzip -c", "nuc in seq[::-1]]) return compseq def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse GTF file and", "sequences with a given hamming distance.\"\"\" if num_edits > len(seq): raise ValueError('Asked to", "raise ValueError('Asked to make make more edits (%d) than the length ' 'of", "interval ends after current interval cur[1] = iv[1] else: merged.append(cur) cur = list(iv)", "gene_ids = df_sel.iloc[:, 8].apply( lambda x: gtf.parse_attributes(x)['gene_id']) for id_, chrom, start, end in", "ID, value = unique gene name #genes = pd.Series(index=gene_table.index, data=gene_names) # sort genes", "zip( gene_ids, df_sel.iloc[:, 0], df_sel.iloc[:, 3], df_sel.iloc[:, 4]): total += 1 try: gene", "['A', 'C', 'G', 'T']: if nuc != seq[pos]: mm = seq[:pos] + nuc", "16*1024*1024) def make_file_executable(path): \"\"\"Sets the user executable flag for a file.\"\"\" st =", "# select only exon entries df_sel = df.loc[df.iloc[:, 2] == 'exon'] # extract", "get_all_kmers(k, var, kmer_list) if not kmer: return kmer_list def get_mismatch_sequences(seq): \"\"\"Generates all nucleotide", "data=gene_names) # sort genes by 
chromosome, strand, and then position sorted_gene_ids = sorted(", "import subprocess import logging import os import shutil import stat import itertools from", "gtf import singlecell _LOGGER = logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique gene identifiers", "gene gene_exons = OrderedDict([id_, []] for id_ in gene_table.index) valid = 0 total", "else: for nuc in ['A', 'C', 'G', 'T']: var = kmer + nuc", "pkg_resources import resource_string import pandas as pd from genometools.expression import ExpGeneTable from genometools", "file \"%s\" in chunks...', genome_annotation_file) for i, df in enumerate(pd.read_csv( genome_annotation_file, dtype={0: str},", "total, 100*(valid/float(total))) return gene_exons def merge_intervals(intervals): \"\"\"Merge overlapping intervals. TODO: docstring\"\"\" if not", "seq_list = [nt for nt in seq] mismatch = [] for comb in", "def get_mismatch_sequences(seq): \"\"\"Generates all nucleotide sequences with hamming distance 1 to `seq`. TODO:", "[] cur = list(intervals[0]) for iv in intervals[1:]: # interval starts inside/right after", "gene_ids = gene_table.index.tolist() gene_ids = [name if c == 1 else '%s_%s' %", "return merged def get_mitochondrial_genes(species='human'): \"\"\"Get a list of all mitochondrial genes for a", "def get_edit_sequences(seq, num_edits, bases=None): \"\"\"Return all nucleotide sequences with a given hamming distance.\"\"\"", "sequence (%d nt).' % (num_edits, len(seq))) if bases is None: bases = set('ACGT')", "k-mer sequences (for A/C/G/T alphabet). 
def get_all_kmers(k, kmer='', kmer_list=None):
    """Returns all possible k-mer sequences (for A/C/G/T alphabet).

    K-mers are produced in lexicographic order ('AA...' first). When called
    with a non-empty `kmer` prefix, all its length-k extensions are appended
    to `kmer_list` and nothing is returned, mirroring the recursive API.

    TODO: docstring"""
    if kmer_list is None:
        kmer_list = []
    # depth-first expansion with an explicit stack instead of recursion
    pending = [kmer]
    while pending:
        prefix = pending.pop()
        if len(prefix) == k:
            kmer_list.append(prefix)
        else:
            # push in reverse alphabet order so 'A' extensions pop first,
            # preserving lexicographic output order
            for base in reversed('ACGT'):
                pending.append(prefix + base)
    # only the outermost call (empty prefix) hands back the result list
    if not kmer:
        return kmer_list
def get_plotly_js():
    """Return the plotly javascript code.

    Reads the bundled, minified plotly.js straight out of the installed
    `plotly` package and decodes it as UTF-8 text.

    TODO: docstring
    """
    # resource_string?
    js_path = 'package_data/plotly.min.js'
    raw = resource_string('plotly', js_path)
    return raw.decode('utf-8')
TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists',", "seq[::-1]]) return compseq def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse GTF file and get a", "ofh: for f in input_files: with open(f, 'rb') as ifh: shutil.copyfileobj(ifh, ofh, 16*1024*1024)", "c) in enumerate(gene_counts.items())] return gene_ids def get_edit_sequences(seq, num_edits, bases=None): \"\"\"Return all nucleotide sequences", "all nucleotide sequences with hamming distance 1 to `seq`. TODO: docstring\"\"\" for pos", "gene_table.index], key=lambda id_: [gene_table.loc[id_, 'chromosome'], gene_table.loc[id_, 'position'] < 0, abs(gene_table.loc[id_, 'position'])]) #genes =", "gene_table.loc[id_, 'position'] < 0, abs(gene_table.loc[id_, 'position'])]) #genes = genes.loc[sorted_gene_ids] gene_table = gene_table.loc[sorted_gene_ids] #", "IDs gene_ids = df_sel.iloc[:, 8].apply( lambda x: gtf.parse_attributes(x)['gene_id']) for id_, chrom, start, end", "'of the sequence (%d nt).' % (num_edits, len(seq))) if bases is None: bases", "intervals. TODO: docstring\"\"\" if not intervals: return [] # sort intervals by start", "if nuc != seq[pos]: mm = seq[:pos] + nuc + seq[(pos+1):] yield mm", "'G' } compseq = ''.join([rc[nuc] for nuc in seq[::-1]]) return compseq def get_gene_exons(gene_table,", "break mut[pos] = nt if valid: mismatch.append(''.join(mut)) return sorted(mismatch) def concatenate_files(input_files, output_file, append=False):", "+= 1 gene_exons[id_].append([start-1, end]) _LOGGER.info('%d / %d exons from valid genes (%.1f %%).',", "nucleotide sequence. TODO: docstring\"\"\" rc = { 'A': 'T', 'T': 'A', 'G': 'C',", "= { 'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G' } compseq =", "} compseq = ''.join([rc[nuc] for nuc in seq[::-1]]) return compseq def get_gene_exons(gene_table, genome_annotation_file,", "make more edits (%d) than the length ' 'of the sequence (%d nt).'", "for protein-coding genes.) 
TODO: docstring\"\"\" # get gene names that are guaranteed to", "path = os.path.join(singlecell._root, 'data', 'gene_lists', 'mitochondrial_%s.tsv' % species) with open(path) as fh: return", "count occurrences for each of gene name counts = gene_table['name'].value_counts() gene_counts = counts.loc[gene_table['name']]", "'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G' } compseq = ''.join([rc[nuc] for", "sorted(mismatch) def concatenate_files(input_files, output_file, append=False): write_mode = 'wb' if append: write_mode = 'ab'", "\"%s\" in chunks...', genome_annotation_file) for i, df in enumerate(pd.read_csv( genome_annotation_file, dtype={0: str}, sep='\\t',", "= os.stat(path) os.chmod(path, st.st_mode | stat.S_IEXEC) def zcat_subproc(path): \"\"\"Creates a subprocess for decompressing", "from pkg_resources import resource_string import pandas as pd from genometools.expression import ExpGeneTable from", "pos, nt in zip(comb, subs): if mut[pos] == nt: valid = False break", "= unique gene name #genes = pd.Series(index=gene_table.index, data=gene_names) # sort genes by chromosome,", "in ['A', 'C', 'G', 'T']: var = kmer + nuc get_all_kmers(k, var, kmer_list)", "= df_sel.iloc[:, 8].apply( lambda x: gtf.parse_attributes(x)['gene_id']) for id_, chrom, start, end in zip(", "starts inside/right after current interval if iv[0] <= cur[1]: if iv[1] > cur[1]:", "path, shell=True, stdout=subprocess.PIPE) return subproc def get_all_kmers(k, kmer='', kmer_list=None): \"\"\"Returns all possible k-mer", "df_sel = df.loc[df.iloc[:, 2] == 'exon'] # extract gene IDs gene_ids = df_sel.iloc[:,", "<= cur[1]: if iv[1] > cur[1]: # interval ends after current interval cur[1]", "chrom, gene_chrom) else: valid += 1 gene_exons[id_].append([start-1, end]) _LOGGER.info('%d / %d exons from", "and then position sorted_gene_ids = sorted( [id_ for id_ in gene_table.index], key=lambda id_:", "chunksize=10000): \"\"\"Parse GTF file and get a dictionary of gene=>list of exon intervals.", "= 
genes.loc[sorted_gene_ids] gene_table = gene_table.loc[sorted_gene_ids] # dictionary for holding list of intervals for", "in gene_table.index], key=lambda id_: [gene_table.loc[id_, 'chromosome'], gene_table.loc[id_, 'position'] < 0, abs(gene_table.loc[id_, 'position'])]) #genes", "\"%s\"' % path, shell=True, stdout=subprocess.PIPE) return subproc def get_all_kmers(k, kmer='', kmer_list=None): \"\"\"Returns all", "abs(gene_table.loc[id_, 'position'])]) #genes = genes.loc[sorted_gene_ids] gene_table = gene_table.loc[sorted_gene_ids] # dictionary for holding list", "hamming distance 1 to `seq`. TODO: docstring\"\"\" for pos in range(len(seq)): for nuc", "f in input_files: with open(f, 'rb') as ifh: shutil.copyfileobj(ifh, ofh, 16*1024*1024) def make_file_executable(path):", "bases is None: bases = set('ACGT') length = len(seq) all_bases = [bases for", "in seq[::-1]]) return compseq def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse GTF file and get", "= [] if len(kmer) == k: kmer_list.append(kmer) else: for nuc in ['A', 'C',", "sort intervals by start position intervals = sorted(intervals, key=lambda x:x[0]) merged = []", "subprocess import logging import os import shutil import stat import itertools from collections", "os.path.join(singlecell._root, 'data', 'gene_lists', 'mitochondrial_%s.tsv' % species) with open(path) as fh: return fh.read().split('\\n') def", "comment='#', header=None, chunksize=chunksize)): # select only exon entries df_sel = df.loc[df.iloc[:, 2] ==", "genometools import gtf import singlecell _LOGGER = logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique", "is not contained in the gene table continue gene_chrom = gene_table.loc[id_, 'chromosome'] if", "to make make more edits (%d) than the length ' 'of the sequence", "% (num_edits, len(seq))) if bases is None: bases = set('ACGT') length = len(seq)", "= [bases for i in range(num_edits)] seq_list = [nt for nt 
def get_edit_sequences(seq, num_edits, bases=None):
    """Return all nucleotide sequences with a given hamming distance.

    Parameters
    ----------
    seq : str
        The source sequence.
    num_edits : int
        The exact hamming distance (number of substituted positions).
    bases : iterable of str, optional
        The substitution alphabet. [set('ACGT')]

    Returns
    -------
    list of str
        All sequences differing from `seq` in exactly `num_edits`
        positions, in sorted order.

    Raises
    ------
    ValueError
        If `num_edits` exceeds the length of `seq`.
    """
    if num_edits > len(seq):
        # fixed duplicated word ("make make") in the original message
        raise ValueError('Asked to make more edits (%d) than the length '
                         'of the sequence (%d nt).' % (num_edits, len(seq)))
    if bases is None:
        bases = set('ACGT')

    seq_list = list(seq)
    mismatch = []
    # choose which positions to edit, then try every combination of
    # substitute bases at those positions
    for comb in itertools.combinations(range(len(seq)), num_edits):
        for subs in itertools.product(bases, repeat=num_edits):
            mut = seq_list[:]
            valid = True
            for pos, nt in zip(comb, subs):
                if mut[pos] == nt:
                    # substitute equals the original base, so this candidate
                    # would have a smaller hamming distance -- skip it
                    valid = False
                    break
                mut[pos] = nt
            if valid:
                mismatch.append(''.join(mut))
    return sorted(mismatch)
def merge_intervals(intervals):
    """Merge overlapping intervals.

    Takes a list of [start, end] pairs and returns a new list in which all
    overlapping (or touching) intervals have been collapsed into one.

    TODO: docstring"""
    if not intervals:
        return []
    # process intervals in order of their start position
    ordered = sorted(intervals, key=lambda iv: iv[0])
    merged = []
    current = list(ordered[0])
    for start, end in ordered[1:]:
        if start <= current[1]:
            # overlaps (or touches) the current interval -- extend it
            # if it reaches further
            if end > current[1]:
                current[1] = end
        else:
            # disjoint: emit the finished interval and start a new one
            merged.append(current)
            current = [start, end]
    merged.append(current)
    return merged
def zcat_subproc(path):
    """Creates a subprocess for decompressing a gzip file.

    Parameters
    ----------
    path : str
        Path of the gzip-compressed file.

    Returns
    -------
    subprocess.Popen
        Running `gunzip -c` process; read the decompressed data from its
        `stdout` pipe.

    TODO: docstring"""
    # Pass the command as an argument list with the default shell=False:
    # the original 'gunzip -c "%s"' % path shell string broke on file
    # names containing double quotes and allowed shell injection.
    subproc = subprocess.Popen(['gunzip', '-c', path],
                               stdout=subprocess.PIPE)
    return subproc
TODO: docstring \"\"\" # resource_string?", "'%s instead of %s).', id_, chrom, gene_chrom) else: valid += 1 gene_exons[id_].append([start-1, end])", "'A', 'G': 'C', 'C': 'G' } compseq = ''.join([rc[nuc] for nuc in seq[::-1]])", "> cur[1]: # interval ends after current interval cur[1] = iv[1] else: merged.append(cur)", "seq[(pos+1):] yield mm def get_reverse_complement(seq): \"\"\"Returns the reverse complement of a nucleotide sequence.", "gene=>list of exon intervals. (Only for protein-coding genes.) TODO: docstring\"\"\" # get gene", "ignored (wrong chromosome: ' '%s instead of %s).', id_, chrom, gene_chrom) else: valid", "dirnames, filenames = next(os.walk(dir_)) if dirnames or filenames: is_empty = False except StopIteration:", "gene identifiers that primarily use the genes' names.\"\"\" # count occurrences for each", "executable flag for a file.\"\"\" st = os.stat(path) os.chmod(path, st.st_mode | stat.S_IEXEC) def", "%d exons from valid genes (%.1f %%).', valid, total, 100*(valid/float(total))) return gene_exons def", "bases = set('ACGT') length = len(seq) all_bases = [bases for i in range(num_edits)]", "genes by chromosome, strand, and then position sorted_gene_ids = sorted( [id_ for id_", "select only exon entries df_sel = df.loc[df.iloc[:, 2] == 'exon'] # extract gene", "given species. \"Mitochondrial genes\" are defined here as all genes on the mitochondrial", "num_edits > len(seq): raise ValueError('Asked to make make more edits (%d) than the", "var, kmer_list) if not kmer: return kmer_list def get_mismatch_sequences(seq): \"\"\"Generates all nucleotide sequences", "''.join([rc[nuc] for nuc in seq[::-1]]) return compseq def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse GTF", "in zip( gene_ids, df_sel.iloc[:, 0], df_sel.iloc[:, 3], df_sel.iloc[:, 4]): total += 1 try:", "def is_empty_dir(dir_): \"\"\"Tests whether a directory is empty. 
def is_empty_dir(dir_):
    """Tests whether a directory is empty.

    Note: Also returns True if the directory doesn't exist.

    TODO: docstring
    """
    try:
        # the first os.walk() entry lists the directory's direct contents
        _, subdirs, files = next(os.walk(dir_))
    except StopIteration:
        # os.walk() yields nothing for a nonexistent directory
        return True
    return not (subdirs or files)
3], df_sel.iloc[:, 4]):", "% path, shell=True, stdout=subprocess.PIPE) return subproc def get_all_kmers(k, kmer='', kmer_list=None): \"\"\"Returns all possible", "get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique gene identifiers that primarily use the genes' names.\"\"\" #", "'mitochondrial_%s.tsv' % species) with open(path) as fh: return fh.read().split('\\n') def get_ribosomal_genes(species='human'): \"\"\"Get a", "open(path) as fh: return fh.read().split('\\n') def get_ribosomal_genes(species='human'): \"\"\"Get a list of all ribosomal", "in input_files: with open(f, 'rb') as ifh: shutil.copyfileobj(ifh, ofh, 16*1024*1024) def make_file_executable(path): \"\"\"Sets", "(wrong chromosome: ' '%s instead of %s).', id_, chrom, gene_chrom) else: valid +=", "docstring\"\"\" if not intervals: return [] # sort intervals by start position intervals", "make_file_executable(path): \"\"\"Sets the user executable flag for a file.\"\"\" st = os.stat(path) os.chmod(path,", "merged.append(cur) return merged def get_mitochondrial_genes(species='human'): \"\"\"Get a list of all mitochondrial genes for", "\"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'mitochondrial_%s.tsv' % species) with open(path) as fh:", "position intervals = sorted(intervals, key=lambda x:x[0]) merged = [] cur = list(intervals[0]) for", "if len(kmer) == k: kmer_list.append(kmer) else: for nuc in ['A', 'C', 'G', 'T']:", "for i, df in enumerate(pd.read_csv( genome_annotation_file, dtype={0: str}, sep='\\t', comment='#', header=None, chunksize=chunksize)): #", "names that are guaranteed to be unique #gene_names = get_readable_gene_identifiers(gene_table) # series with", "current interval if iv[0] <= cur[1]: if iv[1] > cur[1]: # interval ends", "gene_table.index) valid = 0 total = 0 _LOGGER.info('Parsing GTF file \"%s\" in chunks...',", "as fh: return fh.read().split('\\n') def get_ribosomal_genes(species='human'): \"\"\"Get a list of all ribosomal genes", "TODO: 
docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv' % species) with open(path)", "distance 1 to `seq`. TODO: docstring\"\"\" for pos in range(len(seq)): for nuc in", "gene_table.loc[id_, 'chromosome'] if chrom != gene_chrom: _LOGGER.warning('%s exon ignored (wrong chromosome: ' '%s", "TODO: docstring\"\"\" if not intervals: return [] # sort intervals by start position", "the gene table continue gene_chrom = gene_table.loc[id_, 'chromosome'] if chrom != gene_chrom: _LOGGER.warning('%s", "small or large ribosomal subunit (including fusion genes). TODO: docstring \"\"\" path =", "chunks...', genome_annotation_file) for i, df in enumerate(pd.read_csv( genome_annotation_file, dtype={0: str}, sep='\\t', comment='#', header=None,", "next(os.walk(dir_)) if dirnames or filenames: is_empty = False except StopIteration: pass return is_empty", "identifiers that primarily use the genes' names.\"\"\" # count occurrences for each of", "'wb' if append: write_mode = 'ab' with open(output_file, write_mode) as ofh: for f", "import ExpGeneTable from genometools import gtf import singlecell _LOGGER = logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table:", "by chromosome, strand, and then position sorted_gene_ids = sorted( [id_ for id_ in", "mut = seq_list[:] valid = True for pos, nt in zip(comb, subs): if", "index = Ensembl ID, value = unique gene name #genes = pd.Series(index=gene_table.index, data=gene_names)", "< 0, abs(gene_table.loc[id_, 'position'])]) #genes = genes.loc[sorted_gene_ids] gene_table = gene_table.loc[sorted_gene_ids] # dictionary for", "write_mode) as ofh: for f in input_files: with open(f, 'rb') as ifh: shutil.copyfileobj(ifh,", "'ab' with open(output_file, write_mode) as ofh: for f in input_files: with open(f, 'rb')", "from collections import OrderedDict from pkg_resources import resource_string import pandas as pd from", "= subprocess.Popen('gunzip -c \"%s\"' % path, shell=True, 
stdout=subprocess.PIPE) return subproc def get_all_kmers(k, kmer='',", "os import shutil import stat import itertools from collections import OrderedDict from pkg_resources", "input_files: with open(f, 'rb') as ifh: shutil.copyfileobj(ifh, ofh, 16*1024*1024) def make_file_executable(path): \"\"\"Sets the", "import gtf import singlecell _LOGGER = logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique gene", "= OrderedDict([id_, []] for id_ in gene_table.index) valid = 0 total = 0", "gene IDs gene_ids = df_sel.iloc[:, 8].apply( lambda x: gtf.parse_attributes(x)['gene_id']) for id_, chrom, start,", "100*(valid/float(total))) return gene_exons def merge_intervals(intervals): \"\"\"Merge overlapping intervals. TODO: docstring\"\"\" if not intervals:", "chromosome, strand, and then position sorted_gene_ids = sorted( [id_ for id_ in gene_table.index],", "(including fusion genes). TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv' %", "= nt if valid: mismatch.append(''.join(mut)) return sorted(mismatch) def concatenate_files(input_files, output_file, append=False): write_mode =", "' '%s instead of %s).', id_, chrom, gene_chrom) else: valid += 1 gene_exons[id_].append([start-1,", "# sort genes by chromosome, strand, and then position sorted_gene_ids = sorted( [id_", "0], df_sel.iloc[:, 3], df_sel.iloc[:, 4]): total += 1 try: gene = gene_table.loc[id_] except", "cur[1]: if iv[1] > cur[1]: # interval ends after current interval cur[1] =", "i, (name, c) in enumerate(gene_counts.items())] return gene_ids def get_edit_sequences(seq, num_edits, bases=None): \"\"\"Return all", "list of intervals for each gene gene_exons = OrderedDict([id_, []] for id_ in", "in seq] mismatch = [] for comb in itertools.combinations(range(length), num_edits): for subs in", "True try: _, dirnames, filenames = next(os.walk(dir_)) if dirnames or filenames: is_empty =", "== k: kmer_list.append(kmer) 
else: for nuc in ['A', 'C', 'G', 'T']: var =", "True if the directory doesn't exist. TODO: docstring \"\"\" is_empty = True try:", "id_ in gene_table.index) valid = 0 total = 0 _LOGGER.info('Parsing GTF file \"%s\"", "kmer: return kmer_list def get_mismatch_sequences(seq): \"\"\"Generates all nucleotide sequences with hamming distance 1", "of a nucleotide sequence. TODO: docstring\"\"\" rc = { 'A': 'T', 'T': 'A',", "OrderedDict([id_, []] for id_ in gene_table.index) valid = 0 total = 0 _LOGGER.info('Parsing", "dictionary for holding list of intervals for each gene gene_exons = OrderedDict([id_, []]", "2] == 'exon'] # extract gene IDs gene_ids = df_sel.iloc[:, 8].apply( lambda x:", "genometools.expression import ExpGeneTable from genometools import gtf import singlecell _LOGGER = logging.getLogger(__name__) def", "# series with index = Ensembl ID, value = unique gene name #genes", "all genes on the mitochondrial chromosome. TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data',", "1 to `seq`. TODO: docstring\"\"\" for pos in range(len(seq)): for nuc in ['A',", "nucleotide sequences with hamming distance 1 to `seq`. TODO: docstring\"\"\" for pos in", "def get_reverse_complement(seq): \"\"\"Returns the reverse complement of a nucleotide sequence. 
def get_reverse_complement(seq):
    """Returns the reverse complement of a nucleotide sequence.

    Raises KeyError for characters outside the A/C/G/T alphabet.

    TODO: docstring"""
    # complement table for the four DNA bases
    pairs = {'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G'}
    # walk the sequence back-to-front, complementing each base
    return ''.join(pairs[base] for base in reversed(seq))
gene_table.loc[sorted_gene_ids] # dictionary for holding", "= gene_table.loc[id_, 'chromosome'] if chrom != gene_chrom: _LOGGER.warning('%s exon ignored (wrong chromosome: '", "kmer_list.append(kmer) else: for nuc in ['A', 'C', 'G', 'T']: var = kmer +", "= gene_table.loc[id_] except KeyError: # this gene is not contained in the gene", "3], df_sel.iloc[:, 4]): total += 1 try: gene = gene_table.loc[id_] except KeyError: #", "gene_exons def merge_intervals(intervals): \"\"\"Merge overlapping intervals. TODO: docstring\"\"\" if not intervals: return []", "subproc = subprocess.Popen('gunzip -c \"%s\"' % path, shell=True, stdout=subprocess.PIPE) return subproc def get_all_kmers(k,", "genes on the mitochondrial chromosome. TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists',", "docstring\"\"\" subproc = subprocess.Popen('gunzip -c \"%s\"' % path, shell=True, stdout=subprocess.PIPE) return subproc def", "subs): if mut[pos] == nt: valid = False break mut[pos] = nt if", "'T']: if nuc != seq[pos]: mm = seq[:pos] + nuc + seq[(pos+1):] yield", "(num_edits, len(seq))) if bases is None: bases = set('ACGT') length = len(seq) all_bases", "intervals by start position intervals = sorted(intervals, key=lambda x:x[0]) merged = [] cur", "a given hamming distance.\"\"\" if num_edits > len(seq): raise ValueError('Asked to make make", "# get gene names that are guaranteed to be unique #gene_names = get_readable_gene_identifiers(gene_table)", "def make_file_executable(path): \"\"\"Sets the user executable flag for a file.\"\"\" st = os.stat(path)", "gene names that are guaranteed to be unique #gene_names = get_readable_gene_identifiers(gene_table) # series", "in ['A', 'C', 'G', 'T']: if nuc != seq[pos]: mm = seq[:pos] +", "subprocess.Popen('gunzip -c \"%s\"' % path, shell=True, stdout=subprocess.PIPE) return subproc def get_all_kmers(k, kmer='', kmer_list=None):", "valid = 0 total = 0 _LOGGER.info('Parsing GTF file \"%s\" in chunks...', 
genome_annotation_file)", "exons from valid genes (%.1f %%).', valid, total, 100*(valid/float(total))) return gene_exons def merge_intervals(intervals):", "def get_plotly_js(): \"\"\"Return the plotly javascript code. TODO: docstring \"\"\" # resource_string? path", "gene_chrom) else: valid += 1 gene_exons[id_].append([start-1, end]) _LOGGER.info('%d / %d exons from valid", "'ribosomal_%s.tsv' % species) with open(path) as fh: return fh.read().split('\\n') def get_plotly_js(): \"\"\"Return the", "comb in itertools.combinations(range(length), num_edits): for subs in itertools.product(*all_bases): mut = seq_list[:] valid =", "{ 'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G' } compseq = ''.join([rc[nuc]", "def zcat_subproc(path): \"\"\"Creates a subprocess for decompressing a gzip file. TODO: docstring\"\"\" subproc", "for pos in range(len(seq)): for nuc in ['A', 'C', 'G', 'T']: if nuc", "GTF file \"%s\" in chunks...', genome_annotation_file) for i, df in enumerate(pd.read_csv( genome_annotation_file, dtype={0:", "for id_, chrom, start, end in zip( gene_ids, df_sel.iloc[:, 0], df_sel.iloc[:, 3], df_sel.iloc[:,", "length ' 'of the sequence (%d nt).' % (num_edits, len(seq))) if bases is", "ValueError('Asked to make make more edits (%d) than the length ' 'of the", "'gene_lists', 'ribosomal_%s.tsv' % species) with open(path) as fh: return fh.read().split('\\n') def get_plotly_js(): \"\"\"Return", "logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique gene identifiers that primarily use the genes'", "sequences with hamming distance 1 to `seq`. TODO: docstring\"\"\" for pos in range(len(seq)):", "of all mitochondrial genes for a given species. 
\"Mitochondrial genes\" are defined here", "len(seq) all_bases = [bases for i in range(num_edits)] seq_list = [nt for nt", "= next(os.walk(dir_)) if dirnames or filenames: is_empty = False except StopIteration: pass return", "if append: write_mode = 'ab' with open(output_file, write_mode) as ofh: for f in", "from valid genes (%.1f %%).', valid, total, 100*(valid/float(total))) return gene_exons def merge_intervals(intervals): \"\"\"Merge", "for a given species. \"Ribosomal genes\" are defined here as all protein-coding genes", "given hamming distance.\"\"\" if num_edits > len(seq): raise ValueError('Asked to make make more", "not intervals: return [] # sort intervals by start position intervals = sorted(intervals,", "a structural component of the small or large ribosomal subunit (including fusion genes).", "genes). TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv' % species) with", "a given species. \"Mitochondrial genes\" are defined here as all genes on the", "hamming distance.\"\"\" if num_edits > len(seq): raise ValueError('Asked to make make more edits", "subs in itertools.product(*all_bases): mut = seq_list[:] valid = True for pos, nt in", "on the mitochondrial chromosome. TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'mitochondrial_%s.tsv'", "the reverse complement of a nucleotide sequence. TODO: docstring\"\"\" rc = { 'A':", "x:x[0]) merged = [] cur = list(intervals[0]) for iv in intervals[1:]: # interval", "docstring \"\"\" is_empty = True try: _, dirnames, filenames = next(os.walk(dir_)) if dirnames", "'G': 'C', 'C': 'G' } compseq = ''.join([rc[nuc] for nuc in seq[::-1]]) return", "['A', 'C', 'G', 'T']: var = kmer + nuc get_all_kmers(k, var, kmer_list) if", "GTF file and get a dictionary of gene=>list of exon intervals. 
(Only for", "= 'ab' with open(output_file, write_mode) as ofh: for f in input_files: with open(f,", "enumerate(gene_counts.items())] return gene_ids def get_edit_sequences(seq, num_edits, bases=None): \"\"\"Return all nucleotide sequences with a", "of the small or large ribosomal subunit (including fusion genes). TODO: docstring \"\"\"", "in the gene table continue gene_chrom = gene_table.loc[id_, 'chromosome'] if chrom != gene_chrom:", "complement of a nucleotide sequence. TODO: docstring\"\"\" rc = { 'A': 'T', 'T':", "exon ignored (wrong chromosome: ' '%s instead of %s).', id_, chrom, gene_chrom) else:", "that primarily use the genes' names.\"\"\" # count occurrences for each of gene", "return gene_exons def merge_intervals(intervals): \"\"\"Merge overlapping intervals. TODO: docstring\"\"\" if not intervals: return", "gene is not contained in the gene table continue gene_chrom = gene_table.loc[id_, 'chromosome']", "ofh, 16*1024*1024) def make_file_executable(path): \"\"\"Sets the user executable flag for a file.\"\"\" st", "that are guaranteed to be unique #gene_names = get_readable_gene_identifiers(gene_table) # series with index", "more edits (%d) than the length ' 'of the sequence (%d nt).' %", "= [] cur = list(intervals[0]) for iv in intervals[1:]: # interval starts inside/right", "from genometools import gtf import singlecell _LOGGER = logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return", "def get_ribosomal_genes(species='human'): \"\"\"Get a list of all ribosomal genes for a given species.", "= get_readable_gene_identifiers(gene_table) # series with index = Ensembl ID, value = unique gene", "kmer_list = [] if len(kmer) == k: kmer_list.append(kmer) else: for nuc in ['A',", "resource_string('plotly', path).decode('utf-8') def is_empty_dir(dir_): \"\"\"Tests whether a directory is empty. 
Note: Also returns", "[] # sort intervals by start position intervals = sorted(intervals, key=lambda x:x[0]) merged", "of exon intervals. (Only for protein-coding genes.) TODO: docstring\"\"\" # get gene names", "gene_ids def get_edit_sequences(seq, num_edits, bases=None): \"\"\"Return all nucleotide sequences with a given hamming", "extract gene IDs gene_ids = df_sel.iloc[:, 8].apply( lambda x: gtf.parse_attributes(x)['gene_id']) for id_, chrom,", "' 'of the sequence (%d nt).' % (num_edits, len(seq))) if bases is None:", "stat import itertools from collections import OrderedDict from pkg_resources import resource_string import pandas", "True for pos, nt in zip(comb, subs): if mut[pos] == nt: valid =", "all ribosomal genes for a given species. \"Ribosomal genes\" are defined here as", "\"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv' % species) with open(path) as fh:", "if num_edits > len(seq): raise ValueError('Asked to make make more edits (%d) than", "with open(path) as fh: return fh.read().split('\\n') def get_ribosomal_genes(species='human'): \"\"\"Get a list of all", "in zip(comb, subs): if mut[pos] == nt: valid = False break mut[pos] =", "dictionary of gene=>list of exon intervals. (Only for protein-coding genes.) TODO: docstring\"\"\" #", "overlapping intervals. 
TODO: docstring\"\"\" if not intervals: return [] # sort intervals by", "key=lambda x:x[0]) merged = [] cur = list(intervals[0]) for iv in intervals[1:]: #", "guaranteed to be unique #gene_names = get_readable_gene_identifiers(gene_table) # series with index = Ensembl", "'position'] < 0, abs(gene_table.loc[id_, 'position'])]) #genes = genes.loc[sorted_gene_ids] gene_table = gene_table.loc[sorted_gene_ids] # dictionary", "make make more edits (%d) than the length ' 'of the sequence (%d", "_, dirnames, filenames = next(os.walk(dir_)) if dirnames or filenames: is_empty = False except", "gene_table['name'].value_counts() gene_counts = counts.loc[gene_table['name']] gene_ids = gene_table.index.tolist() gene_ids = [name if c ==", "sequences (for A/C/G/T alphabet). TODO: docstring\"\"\" if kmer_list is None: kmer_list = []", "open(f, 'rb') as ifh: shutil.copyfileobj(ifh, ofh, 16*1024*1024) def make_file_executable(path): \"\"\"Sets the user executable", "for i, (name, c) in enumerate(gene_counts.items())] return gene_ids def get_edit_sequences(seq, num_edits, bases=None): \"\"\"Return", "entries df_sel = df.loc[df.iloc[:, 2] == 'exon'] # extract gene IDs gene_ids =", "protein products are a structural component of the small or large ribosomal subunit", "8].apply( lambda x: gtf.parse_attributes(x)['gene_id']) for id_, chrom, start, end in zip( gene_ids, df_sel.iloc[:,", "`seq`. TODO: docstring\"\"\" for pos in range(len(seq)): for nuc in ['A', 'C', 'G',", "table continue gene_chrom = gene_table.loc[id_, 'chromosome'] if chrom != gene_chrom: _LOGGER.warning('%s exon ignored", "return sorted(mismatch) def concatenate_files(input_files, output_file, append=False): write_mode = 'wb' if append: write_mode =", "= gene_table['name'].value_counts() gene_counts = counts.loc[gene_table['name']] gene_ids = gene_table.index.tolist() gene_ids = [name if c", "get_mismatch_sequences(seq): \"\"\"Generates all nucleotide sequences with hamming distance 1 to `seq`. 
TODO: docstring\"\"\"", "\"\"\"Return unique gene identifiers that primarily use the genes' names.\"\"\" # count occurrences", "= 'package_data/plotly.min.js' return resource_string('plotly', path).decode('utf-8') def is_empty_dir(dir_): \"\"\"Tests whether a directory is empty.", "exist. TODO: docstring \"\"\" is_empty = True try: _, dirnames, filenames = next(os.walk(dir_))", "% species) with open(path) as fh: return fh.read().split('\\n') def get_ribosomal_genes(species='human'): \"\"\"Get a list", "return subproc def get_all_kmers(k, kmer='', kmer_list=None): \"\"\"Returns all possible k-mer sequences (for A/C/G/T", "import logging import os import shutil import stat import itertools from collections import", "instead of %s).', id_, chrom, gene_chrom) else: valid += 1 gene_exons[id_].append([start-1, end]) _LOGGER.info('%d", "= pd.Series(index=gene_table.index, data=gene_names) # sort genes by chromosome, strand, and then position sorted_gene_ids", "TODO: docstring\"\"\" # get gene names that are guaranteed to be unique #gene_names", "interval starts inside/right after current interval if iv[0] <= cur[1]: if iv[1] >", "inside/right after current interval if iv[0] <= cur[1]: if iv[1] > cur[1]: #", "\"\"\"Returns the reverse complement of a nucleotide sequence. TODO: docstring\"\"\" rc = {", "1 gene_exons[id_].append([start-1, end]) _LOGGER.info('%d / %d exons from valid genes (%.1f %%).', valid,", "as fh: return fh.read().split('\\n') def get_plotly_js(): \"\"\"Return the plotly javascript code. TODO: docstring", "\"\"\"Returns all possible k-mer sequences (for A/C/G/T alphabet). 
TODO: docstring\"\"\" if kmer_list is", "= [nt for nt in seq] mismatch = [] for comb in itertools.combinations(range(length),", "'rb') as ifh: shutil.copyfileobj(ifh, ofh, 16*1024*1024) def make_file_executable(path): \"\"\"Sets the user executable flag", "KeyError: # this gene is not contained in the gene table continue gene_chrom", "by start position intervals = sorted(intervals, key=lambda x:x[0]) merged = [] cur =", "in enumerate(gene_counts.items())] return gene_ids def get_edit_sequences(seq, num_edits, bases=None): \"\"\"Return all nucleotide sequences with", "get a dictionary of gene=>list of exon intervals. (Only for protein-coding genes.) TODO:", "gene table continue gene_chrom = gene_table.loc[id_, 'chromosome'] if chrom != gene_chrom: _LOGGER.warning('%s exon", "import os import shutil import stat import itertools from collections import OrderedDict from", "= False break mut[pos] = nt if valid: mismatch.append(''.join(mut)) return sorted(mismatch) def concatenate_files(input_files,", "gene_table.index.tolist() gene_ids = [name if c == 1 else '%s_%s' % (name, gene_ids[i])", "genes (%.1f %%).', valid, total, 100*(valid/float(total))) return gene_exons def merge_intervals(intervals): \"\"\"Merge overlapping intervals.", "if the directory doesn't exist. TODO: docstring \"\"\" is_empty = True try: _,", "value = unique gene name #genes = pd.Series(index=gene_table.index, data=gene_names) # sort genes by", "else: merged.append(cur) cur = list(iv) merged.append(cur) return merged def get_mitochondrial_genes(species='human'): \"\"\"Get a list", "!= seq[pos]: mm = seq[:pos] + nuc + seq[(pos+1):] yield mm def get_reverse_complement(seq):", "gzip file. 
TODO: docstring\"\"\" subproc = subprocess.Popen('gunzip -c \"%s\"' % path, shell=True, stdout=subprocess.PIPE)", "i in range(num_edits)] seq_list = [nt for nt in seq] mismatch = []", "1 else '%s_%s' % (name, gene_ids[i]) for i, (name, c) in enumerate(gene_counts.items())] return", "cur[1]: # interval ends after current interval cur[1] = iv[1] else: merged.append(cur) cur", "seq[pos]: mm = seq[:pos] + nuc + seq[(pos+1):] yield mm def get_reverse_complement(seq): \"\"\"Returns", "compseq def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse GTF file and get a dictionary of", "all_bases = [bases for i in range(num_edits)] seq_list = [nt for nt in", "= len(seq) all_bases = [bases for i in range(num_edits)] seq_list = [nt for", "= logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique gene identifiers that primarily use the", "== 1 else '%s_%s' % (name, gene_ids[i]) for i, (name, c) in enumerate(gene_counts.items())]", "nuc in ['A', 'C', 'G', 'T']: var = kmer + nuc get_all_kmers(k, var,", "resource_string? path = 'package_data/plotly.min.js' return resource_string('plotly', path).decode('utf-8') def is_empty_dir(dir_): \"\"\"Tests whether a directory", "# count occurrences for each of gene name counts = gene_table['name'].value_counts() gene_counts =", "\"\"\"Creates a subprocess for decompressing a gzip file. 
TODO: docstring\"\"\" subproc = subprocess.Popen('gunzip", "= list(iv) merged.append(cur) return merged def get_mitochondrial_genes(species='human'): \"\"\"Get a list of all mitochondrial", "merged = [] cur = list(intervals[0]) for iv in intervals[1:]: # interval starts", "= counts.loc[gene_table['name']] gene_ids = gene_table.index.tolist() gene_ids = [name if c == 1 else", "kmer_list is None: kmer_list = [] if len(kmer) == k: kmer_list.append(kmer) else: for", "merged def get_mitochondrial_genes(species='human'): \"\"\"Get a list of all mitochondrial genes for a given", "total += 1 try: gene = gene_table.loc[id_] except KeyError: # this gene is", "'C', 'G', 'T']: var = kmer + nuc get_all_kmers(k, var, kmer_list) if not", "import stat import itertools from collections import OrderedDict from pkg_resources import resource_string import", "is empty. Note: Also returns True if the directory doesn't exist. TODO: docstring", "gene name #genes = pd.Series(index=gene_table.index, data=gene_names) # sort genes by chromosome, strand, and", "\"\"\"Return all nucleotide sequences with a given hamming distance.\"\"\" if num_edits > len(seq):", "set('ACGT') length = len(seq) all_bases = [bases for i in range(num_edits)] seq_list =", "get_reverse_complement(seq): \"\"\"Returns the reverse complement of a nucleotide sequence. TODO: docstring\"\"\" rc =", "singlecell _LOGGER = logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique gene identifiers that primarily", "id_ in gene_table.index], key=lambda id_: [gene_table.loc[id_, 'chromosome'], gene_table.loc[id_, 'position'] < 0, abs(gene_table.loc[id_, 'position'])])", "get_plotly_js(): \"\"\"Return the plotly javascript code. TODO: docstring \"\"\" # resource_string? 
path =", "df.loc[df.iloc[:, 2] == 'exon'] # extract gene IDs gene_ids = df_sel.iloc[:, 8].apply( lambda", "gene_exons = OrderedDict([id_, []] for id_ in gene_table.index) valid = 0 total =", "for each gene gene_exons = OrderedDict([id_, []] for id_ in gene_table.index) valid =", "nt).' % (num_edits, len(seq))) if bases is None: bases = set('ACGT') length =", "products are a structural component of the small or large ribosomal subunit (including", "given species. \"Ribosomal genes\" are defined here as all protein-coding genes whose protein", "gene_chrom: _LOGGER.warning('%s exon ignored (wrong chromosome: ' '%s instead of %s).', id_, chrom,", "get_edit_sequences(seq, num_edits, bases=None): \"\"\"Return all nucleotide sequences with a given hamming distance.\"\"\" if", "len(seq))) if bases is None: bases = set('ACGT') length = len(seq) all_bases =", "== 'exon'] # extract gene IDs gene_ids = df_sel.iloc[:, 8].apply( lambda x: gtf.parse_attributes(x)['gene_id'])", "position sorted_gene_ids = sorted( [id_ for id_ in gene_table.index], key=lambda id_: [gene_table.loc[id_, 'chromosome'],", "with open(f, 'rb') as ifh: shutil.copyfileobj(ifh, ofh, 16*1024*1024) def make_file_executable(path): \"\"\"Sets the user", "\"\"\"Get a list of all mitochondrial genes for a given species. 
\"Mitochondrial genes\"", "rc = { 'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G' } compseq", "kmer + nuc get_all_kmers(k, var, kmer_list) if not kmer: return kmer_list def get_mismatch_sequences(seq):", "get_readable_gene_identifiers(gene_table) # series with index = Ensembl ID, value = unique gene name", "= seq[:pos] + nuc + seq[(pos+1):] yield mm def get_reverse_complement(seq): \"\"\"Returns the reverse", "gene_ids = [name if c == 1 else '%s_%s' % (name, gene_ids[i]) for", "= [] for comb in itertools.combinations(range(length), num_edits): for subs in itertools.product(*all_bases): mut =", "valid, total, 100*(valid/float(total))) return gene_exons def merge_intervals(intervals): \"\"\"Merge overlapping intervals. TODO: docstring\"\"\" if", "_LOGGER.warning('%s exon ignored (wrong chromosome: ' '%s instead of %s).', id_, chrom, gene_chrom)", "gene_table.loc[sorted_gene_ids] # dictionary for holding list of intervals for each gene gene_exons =", "subprocess for decompressing a gzip file. TODO: docstring\"\"\" subproc = subprocess.Popen('gunzip -c \"%s\"'", "valid += 1 gene_exons[id_].append([start-1, end]) _LOGGER.info('%d / %d exons from valid genes (%.1f", "with open(output_file, write_mode) as ofh: for f in input_files: with open(f, 'rb') as", "alphabet). TODO: docstring\"\"\" if kmer_list is None: kmer_list = [] if len(kmer) ==", "i, df in enumerate(pd.read_csv( genome_annotation_file, dtype={0: str}, sep='\\t', comment='#', header=None, chunksize=chunksize)): # select", "possible k-mer sequences (for A/C/G/T alphabet). TODO: docstring\"\"\" if kmer_list is None: kmer_list", "chrom, start, end in zip( gene_ids, df_sel.iloc[:, 0], df_sel.iloc[:, 3], df_sel.iloc[:, 4]): total", "for a given species. 
\"Mitochondrial genes\" are defined here as all genes on", "are a structural component of the small or large ribosomal subunit (including fusion", "list(intervals[0]) for iv in intervals[1:]: # interval starts inside/right after current interval if", "'chromosome'] if chrom != gene_chrom: _LOGGER.warning('%s exon ignored (wrong chromosome: ' '%s instead", "def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000): \"\"\"Parse GTF file and get a dictionary of gene=>list", "> len(seq): raise ValueError('Asked to make make more edits (%d) than the length", "list of all mitochondrial genes for a given species. \"Mitochondrial genes\" are defined", "is_empty = True try: _, dirnames, filenames = next(os.walk(dir_)) if dirnames or filenames:", "of intervals for each gene gene_exons = OrderedDict([id_, []] for id_ in gene_table.index)", "for comb in itertools.combinations(range(length), num_edits): for subs in itertools.product(*all_bases): mut = seq_list[:] valid", "valid = False break mut[pos] = nt if valid: mismatch.append(''.join(mut)) return sorted(mismatch) def", "for subs in itertools.product(*all_bases): mut = seq_list[:] valid = True for pos, nt", "df in enumerate(pd.read_csv( genome_annotation_file, dtype={0: str}, sep='\\t', comment='#', header=None, chunksize=chunksize)): # select only", "docstring\"\"\" for pos in range(len(seq)): for nuc in ['A', 'C', 'G', 'T']: if", "total = 0 _LOGGER.info('Parsing GTF file \"%s\" in chunks...', genome_annotation_file) for i, df", "header=None, chunksize=chunksize)): # select only exon entries df_sel = df.loc[df.iloc[:, 2] == 'exon']", "in chunks...', genome_annotation_file) for i, df in enumerate(pd.read_csv( genome_annotation_file, dtype={0: str}, sep='\\t', comment='#',", "file. TODO: docstring\"\"\" subproc = subprocess.Popen('gunzip -c \"%s\"' % path, shell=True, stdout=subprocess.PIPE) return", "to `seq`. 
TODO: docstring\"\"\" for pos in range(len(seq)): for nuc in ['A', 'C',", "'T']: var = kmer + nuc get_all_kmers(k, var, kmer_list) if not kmer: return", "id_, chrom, gene_chrom) else: valid += 1 gene_exons[id_].append([start-1, end]) _LOGGER.info('%d / %d exons", "the sequence (%d nt).' % (num_edits, len(seq))) if bases is None: bases =", "are guaranteed to be unique #gene_names = get_readable_gene_identifiers(gene_table) # series with index =", "flag for a file.\"\"\" st = os.stat(path) os.chmod(path, st.st_mode | stat.S_IEXEC) def zcat_subproc(path):", "\"Mitochondrial genes\" are defined here as all genes on the mitochondrial chromosome. TODO:", "interval cur[1] = iv[1] else: merged.append(cur) cur = list(iv) merged.append(cur) return merged def", "genome_annotation_file) for i, df in enumerate(pd.read_csv( genome_annotation_file, dtype={0: str}, sep='\\t', comment='#', header=None, chunksize=chunksize)):", "for iv in intervals[1:]: # interval starts inside/right after current interval if iv[0]", "(Only for protein-coding genes.) TODO: docstring\"\"\" # get gene names that are guaranteed", "% species) with open(path) as fh: return fh.read().split('\\n') def get_plotly_js(): \"\"\"Return the plotly", "_LOGGER.info('Parsing GTF file \"%s\" in chunks...', genome_annotation_file) for i, df in enumerate(pd.read_csv( genome_annotation_file,", "are defined here as all protein-coding genes whose protein products are a structural", "enumerate(pd.read_csv( genome_annotation_file, dtype={0: str}, sep='\\t', comment='#', header=None, chunksize=chunksize)): # select only exon entries", "gtf.parse_attributes(x)['gene_id']) for id_, chrom, start, end in zip( gene_ids, df_sel.iloc[:, 0], df_sel.iloc[:, 3],", "gene_table = gene_table.loc[sorted_gene_ids] # dictionary for holding list of intervals for each gene", "end]) _LOGGER.info('%d / %d exons from valid genes (%.1f %%).', valid, total, 100*(valid/float(total)))", "subunit (including fusion genes). 
TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv'", "[bases for i in range(num_edits)] seq_list = [nt for nt in seq] mismatch", "a list of all ribosomal genes for a given species. \"Ribosomal genes\" are", "defined here as all protein-coding genes whose protein products are a structural component", "zip(comb, subs): if mut[pos] == nt: valid = False break mut[pos] = nt", "a directory is empty. Note: Also returns True if the directory doesn't exist.", "all possible k-mer sequences (for A/C/G/T alphabet). TODO: docstring\"\"\" if kmer_list is None:", "genes whose protein products are a structural component of the small or large", "\"\"\"Return the plotly javascript code. TODO: docstring \"\"\" # resource_string? path = 'package_data/plotly.min.js'", "gene_counts = counts.loc[gene_table['name']] gene_ids = gene_table.index.tolist() gene_ids = [name if c == 1", "of all ribosomal genes for a given species. \"Ribosomal genes\" are defined here", "df_sel.iloc[:, 4]): total += 1 try: gene = gene_table.loc[id_] except KeyError: # this", "after current interval cur[1] = iv[1] else: merged.append(cur) cur = list(iv) merged.append(cur) return", "reverse complement of a nucleotide sequence. TODO: docstring\"\"\" rc = { 'A': 'T',", "= Ensembl ID, value = unique gene name #genes = pd.Series(index=gene_table.index, data=gene_names) #", "returns True if the directory doesn't exist. TODO: docstring \"\"\" is_empty = True", "mm = seq[:pos] + nuc + seq[(pos+1):] yield mm def get_reverse_complement(seq): \"\"\"Returns the", "append: write_mode = 'ab' with open(output_file, write_mode) as ofh: for f in input_files:", "get_ribosomal_genes(species='human'): \"\"\"Get a list of all ribosomal genes for a given species. \"Ribosomal", "with hamming distance 1 to `seq`. 
TODO: docstring\"\"\" for pos in range(len(seq)): for", "the genes' names.\"\"\" # count occurrences for each of gene name counts =", "with open(path) as fh: return fh.read().split('\\n') def get_plotly_js(): \"\"\"Return the plotly javascript code.", "TODO: docstring \"\"\" # resource_string? path = 'package_data/plotly.min.js' return resource_string('plotly', path).decode('utf-8') def is_empty_dir(dir_):", "a gzip file. TODO: docstring\"\"\" subproc = subprocess.Popen('gunzip -c \"%s\"' % path, shell=True,", "whether a directory is empty. Note: Also returns True if the directory doesn't", "here as all genes on the mitochondrial chromosome. TODO: docstring \"\"\" path =", "range(len(seq)): for nuc in ['A', 'C', 'G', 'T']: if nuc != seq[pos]: mm", "return kmer_list def get_mismatch_sequences(seq): \"\"\"Generates all nucleotide sequences with hamming distance 1 to", "var = kmer + nuc get_all_kmers(k, var, kmer_list) if not kmer: return kmer_list", "not contained in the gene table continue gene_chrom = gene_table.loc[id_, 'chromosome'] if chrom", "None: bases = set('ACGT') length = len(seq) all_bases = [bases for i in", "return resource_string('plotly', path).decode('utf-8') def is_empty_dir(dir_): \"\"\"Tests whether a directory is empty. Note: Also", "\"\"\"Utility functions.\"\"\" import subprocess import logging import os import shutil import stat import", "fh: return fh.read().split('\\n') def get_plotly_js(): \"\"\"Return the plotly javascript code. TODO: docstring \"\"\"", "'package_data/plotly.min.js' return resource_string('plotly', path).decode('utf-8') def is_empty_dir(dir_): \"\"\"Tests whether a directory is empty. 
Note:", "df_sel.iloc[:, 8].apply( lambda x: gtf.parse_attributes(x)['gene_id']) for id_, chrom, start, end in zip( gene_ids,", "= kmer + nuc get_all_kmers(k, var, kmer_list) if not kmer: return kmer_list def", "TODO: docstring\"\"\" for pos in range(len(seq)): for nuc in ['A', 'C', 'G', 'T']:", "in enumerate(pd.read_csv( genome_annotation_file, dtype={0: str}, sep='\\t', comment='#', header=None, chunksize=chunksize)): # select only exon", "current interval cur[1] = iv[1] else: merged.append(cur) cur = list(iv) merged.append(cur) return merged", "# dictionary for holding list of intervals for each gene gene_exons = OrderedDict([id_,", "strand, and then position sorted_gene_ids = sorted( [id_ for id_ in gene_table.index], key=lambda", "whose protein products are a structural component of the small or large ribosomal", "nuc != seq[pos]: mm = seq[:pos] + nuc + seq[(pos+1):] yield mm def", "kmer_list=None): \"\"\"Returns all possible k-mer sequences (for A/C/G/T alphabet). TODO: docstring\"\"\" if kmer_list", "mismatch.append(''.join(mut)) return sorted(mismatch) def concatenate_files(input_files, output_file, append=False): write_mode = 'wb' if append: write_mode", "a file.\"\"\" st = os.stat(path) os.chmod(path, st.st_mode | stat.S_IEXEC) def zcat_subproc(path): \"\"\"Creates a", "return fh.read().split('\\n') def get_plotly_js(): \"\"\"Return the plotly javascript code. TODO: docstring \"\"\" #", "a given species. \"Ribosomal genes\" are defined here as all protein-coding genes whose", "for nuc in ['A', 'C', 'G', 'T']: if nuc != seq[pos]: mm =", "file and get a dictionary of gene=>list of exon intervals. 
(Only for protein-coding", "shell=True, stdout=subprocess.PIPE) return subproc def get_all_kmers(k, kmer='', kmer_list=None): \"\"\"Returns all possible k-mer sequences", "in range(len(seq)): for nuc in ['A', 'C', 'G', 'T']: if nuc != seq[pos]:", "fh: return fh.read().split('\\n') def get_ribosomal_genes(species='human'): \"\"\"Get a list of all ribosomal genes for", "fh.read().split('\\n') def get_ribosomal_genes(species='human'): \"\"\"Get a list of all ribosomal genes for a given", "+ seq[(pos+1):] yield mm def get_reverse_complement(seq): \"\"\"Returns the reverse complement of a nucleotide", "'C': 'G' } compseq = ''.join([rc[nuc] for nuc in seq[::-1]]) return compseq def", "with index = Ensembl ID, value = unique gene name #genes = pd.Series(index=gene_table.index,", "else: valid += 1 gene_exons[id_].append([start-1, end]) _LOGGER.info('%d / %d exons from valid genes", "0 _LOGGER.info('Parsing GTF file \"%s\" in chunks...', genome_annotation_file) for i, df in enumerate(pd.read_csv(", "os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv' % species) with open(path) as fh: return fh.read().split('\\n') def", "logging import os import shutil import stat import itertools from collections import OrderedDict", "gene_table.loc[id_] except KeyError: # this gene is not contained in the gene table", "here as all protein-coding genes whose protein products are a structural component of", "length = len(seq) all_bases = [bases for i in range(num_edits)] seq_list = [nt", "edits (%d) than the length ' 'of the sequence (%d nt).' % (num_edits,", "species. \"Mitochondrial genes\" are defined here as all genes on the mitochondrial chromosome.", "'data', 'gene_lists', 'mitochondrial_%s.tsv' % species) with open(path) as fh: return fh.read().split('\\n') def get_ribosomal_genes(species='human'):", "javascript code. TODO: docstring \"\"\" # resource_string? 
path = 'package_data/plotly.min.js' return resource_string('plotly', path).decode('utf-8')", "TODO: docstring\"\"\" if kmer_list is None: kmer_list = [] if len(kmer) == k:", "genes\" are defined here as all protein-coding genes whose protein products are a", "plotly javascript code. TODO: docstring \"\"\" # resource_string? path = 'package_data/plotly.min.js' return resource_string('plotly',", "are defined here as all genes on the mitochondrial chromosome. TODO: docstring \"\"\"", "compseq = ''.join([rc[nuc] for nuc in seq[::-1]]) return compseq def get_gene_exons(gene_table, genome_annotation_file, chunksize=10000):", "zcat_subproc(path): \"\"\"Creates a subprocess for decompressing a gzip file. TODO: docstring\"\"\" subproc =", "species) with open(path) as fh: return fh.read().split('\\n') def get_ribosomal_genes(species='human'): \"\"\"Get a list of", "= list(intervals[0]) for iv in intervals[1:]: # interval starts inside/right after current interval", "merge_intervals(intervals): \"\"\"Merge overlapping intervals. TODO: docstring\"\"\" if not intervals: return [] # sort", "docstring \"\"\" path = os.path.join(singlecell._root, 'data', 'gene_lists', 'ribosomal_%s.tsv' % species) with open(path) as", "len(seq): raise ValueError('Asked to make make more edits (%d) than the length '", "\"\"\"Sets the user executable flag for a file.\"\"\" st = os.stat(path) os.chmod(path, st.st_mode", "\"\"\"Get a list of all ribosomal genes for a given species. \"Ribosomal genes\"", "# interval ends after current interval cur[1] = iv[1] else: merged.append(cur) cur =", "the plotly javascript code. TODO: docstring \"\"\" # resource_string? path = 'package_data/plotly.min.js' return", "stat.S_IEXEC) def zcat_subproc(path): \"\"\"Creates a subprocess for decompressing a gzip file. 
TODO: docstring\"\"\"", "import singlecell _LOGGER = logging.getLogger(__name__) def get_readable_gene_identifiers(gene_table: ExpGeneTable): \"\"\"Return unique gene identifiers that", "'chromosome'], gene_table.loc[id_, 'position'] < 0, abs(gene_table.loc[id_, 'position'])]) #genes = genes.loc[sorted_gene_ids] gene_table = gene_table.loc[sorted_gene_ids]", "component of the small or large ribosomal subunit (including fusion genes). TODO: docstring", "= sorted( [id_ for id_ in gene_table.index], key=lambda id_: [gene_table.loc[id_, 'chromosome'], gene_table.loc[id_, 'position']", "large ribosomal subunit (including fusion genes). TODO: docstring \"\"\" path = os.path.join(singlecell._root, 'data',", "itertools.product(*all_bases): mut = seq_list[:] valid = True for pos, nt in zip(comb, subs):", "(for A/C/G/T alphabet). TODO: docstring\"\"\" if kmer_list is None: kmer_list = [] if", "valid genes (%.1f %%).', valid, total, 100*(valid/float(total))) return gene_exons def merge_intervals(intervals): \"\"\"Merge overlapping", "intervals for each gene gene_exons = OrderedDict([id_, []] for id_ in gene_table.index) valid" ]
[ "os.getenv('REMOTE_DEVICE', '') app = Flask(__name__) api = start_hook(REMOTE_DEVICE) @app.route('/sign') def sign(): global api", "= start_hook(REMOTE_DEVICE) data = api.exports.sign(url, headers) return jsonify({ 'url': url, 'headers': headers, 'sign':", "@app.route('/sign') def sign(): global api url = request.args.get('url', '') headers = dict(request.headers) try:", "= api.exports.sign(url, headers) except frida.InvalidOperationError as e: print(f'app crash: {e}') api = start_hook(REMOTE_DEVICE)", "api.exports.sign(url, headers) except frida.InvalidOperationError as e: print(f'app crash: {e}') api = start_hook(REMOTE_DEVICE) data", "import frida from flask import Flask, jsonify, request from hook import start_hook REMOTE_DEVICE", "data = api.exports.sign(url, headers) except frida.InvalidOperationError as e: print(f'app crash: {e}') api =", "= Flask(__name__) api = start_hook(REMOTE_DEVICE) @app.route('/sign') def sign(): global api url = request.args.get('url',", "flask import Flask, jsonify, request from hook import start_hook REMOTE_DEVICE = os.getenv('REMOTE_DEVICE', '')", "headers) except frida.InvalidOperationError as e: print(f'app crash: {e}') api = start_hook(REMOTE_DEVICE) data =", "import Flask, jsonify, request from hook import start_hook REMOTE_DEVICE = os.getenv('REMOTE_DEVICE', '') app", "headers = dict(request.headers) try: data = api.exports.sign(url, headers) except frida.InvalidOperationError as e: print(f'app", "jsonify, request from hook import start_hook REMOTE_DEVICE = os.getenv('REMOTE_DEVICE', '') app = Flask(__name__)", "api.exports.sign(url, headers) return jsonify({ 'url': url, 'headers': headers, 'sign': data, }) if __name__", "e: print(f'app crash: {e}') api = start_hook(REMOTE_DEVICE) data = api.exports.sign(url, headers) return jsonify({", "frida from flask import Flask, jsonify, request from hook import start_hook REMOTE_DEVICE =", "start_hook(REMOTE_DEVICE) @app.route('/sign') def sign(): global api url = 
request.args.get('url', '') headers = dict(request.headers)", "sign(): global api url = request.args.get('url', '') headers = dict(request.headers) try: data =", "{e}') api = start_hook(REMOTE_DEVICE) data = api.exports.sign(url, headers) return jsonify({ 'url': url, 'headers':", "import start_hook REMOTE_DEVICE = os.getenv('REMOTE_DEVICE', '') app = Flask(__name__) api = start_hook(REMOTE_DEVICE) @app.route('/sign')", "request.args.get('url', '') headers = dict(request.headers) try: data = api.exports.sign(url, headers) except frida.InvalidOperationError as", "Flask, jsonify, request from hook import start_hook REMOTE_DEVICE = os.getenv('REMOTE_DEVICE', '') app =", "'') app = Flask(__name__) api = start_hook(REMOTE_DEVICE) @app.route('/sign') def sign(): global api url", "= start_hook(REMOTE_DEVICE) @app.route('/sign') def sign(): global api url = request.args.get('url', '') headers =", "frida.InvalidOperationError as e: print(f'app crash: {e}') api = start_hook(REMOTE_DEVICE) data = api.exports.sign(url, headers)", "start_hook(REMOTE_DEVICE) data = api.exports.sign(url, headers) return jsonify({ 'url': url, 'headers': headers, 'sign': data,", "REMOTE_DEVICE = os.getenv('REMOTE_DEVICE', '') app = Flask(__name__) api = start_hook(REMOTE_DEVICE) @app.route('/sign') def sign():", "Flask(__name__) api = start_hook(REMOTE_DEVICE) @app.route('/sign') def sign(): global api url = request.args.get('url', '')", "headers) return jsonify({ 'url': url, 'headers': headers, 'sign': data, }) if __name__ ==", "global api url = request.args.get('url', '') headers = dict(request.headers) try: data = api.exports.sign(url,", "= request.args.get('url', '') headers = dict(request.headers) try: data = api.exports.sign(url, headers) except frida.InvalidOperationError", "= dict(request.headers) try: data = api.exports.sign(url, headers) except frida.InvalidOperationError as e: print(f'app crash:", "data = api.exports.sign(url, headers) return jsonify({ 'url': url, 'headers': headers, 
'sign': data, })", "'') headers = dict(request.headers) try: data = api.exports.sign(url, headers) except frida.InvalidOperationError as e:", "api = start_hook(REMOTE_DEVICE) data = api.exports.sign(url, headers) return jsonify({ 'url': url, 'headers': headers,", "from flask import Flask, jsonify, request from hook import start_hook REMOTE_DEVICE = os.getenv('REMOTE_DEVICE',", "= os.getenv('REMOTE_DEVICE', '') app = Flask(__name__) api = start_hook(REMOTE_DEVICE) @app.route('/sign') def sign(): global", "return jsonify({ 'url': url, 'headers': headers, 'sign': data, }) if __name__ == '__main__':", "jsonify({ 'url': url, 'headers': headers, 'sign': data, }) if __name__ == '__main__': app.run()", "= api.exports.sign(url, headers) return jsonify({ 'url': url, 'headers': headers, 'sign': data, }) if", "app = Flask(__name__) api = start_hook(REMOTE_DEVICE) @app.route('/sign') def sign(): global api url =", "def sign(): global api url = request.args.get('url', '') headers = dict(request.headers) try: data", "dict(request.headers) try: data = api.exports.sign(url, headers) except frida.InvalidOperationError as e: print(f'app crash: {e}')", "print(f'app crash: {e}') api = start_hook(REMOTE_DEVICE) data = api.exports.sign(url, headers) return jsonify({ 'url':", "api = start_hook(REMOTE_DEVICE) @app.route('/sign') def sign(): global api url = request.args.get('url', '') headers", "api url = request.args.get('url', '') headers = dict(request.headers) try: data = api.exports.sign(url, headers)", "url = request.args.get('url', '') headers = dict(request.headers) try: data = api.exports.sign(url, headers) except", "crash: {e}') api = start_hook(REMOTE_DEVICE) data = api.exports.sign(url, headers) return jsonify({ 'url': url,", "as e: print(f'app crash: {e}') api = start_hook(REMOTE_DEVICE) data = api.exports.sign(url, headers) return", "import os import frida from flask import Flask, jsonify, request from hook import", "request from hook import start_hook REMOTE_DEVICE = 
os.getenv('REMOTE_DEVICE', '') app = Flask(__name__) api", "hook import start_hook REMOTE_DEVICE = os.getenv('REMOTE_DEVICE', '') app = Flask(__name__) api = start_hook(REMOTE_DEVICE)", "start_hook REMOTE_DEVICE = os.getenv('REMOTE_DEVICE', '') app = Flask(__name__) api = start_hook(REMOTE_DEVICE) @app.route('/sign') def", "os import frida from flask import Flask, jsonify, request from hook import start_hook", "try: data = api.exports.sign(url, headers) except frida.InvalidOperationError as e: print(f'app crash: {e}') api", "except frida.InvalidOperationError as e: print(f'app crash: {e}') api = start_hook(REMOTE_DEVICE) data = api.exports.sign(url,", "from hook import start_hook REMOTE_DEVICE = os.getenv('REMOTE_DEVICE', '') app = Flask(__name__) api =" ]
[ "} def _is_instance(self, value): return isinstance(value, datetime) def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES:", "elif isinstance(value, datetime): self.value = value.time() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats)", "and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def set_value(self, value): if self._is_instance(value): self.value = value", "self.value = value elif isinstance(value, datetime): self.value = value.date() elif isinstance(value, py2to3.string): _value", "value.', } def _is_instance(self, value): return isinstance(value, time) def set_value(self, value): if self._is_instance(value):", "= value elif isinstance(value, datetime): self.value = value.time() elif isinstance(value, py2to3.string): _value =", "in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.value _value = self.strptime(self.value, self._date_formats)", "TypeError): continue else: return result return None def strftime(self, value): if self._serialize_format: return", "if self._is_instance(self.value): return _value = self.strptime(self.value, self._date_formats) if _value is None and self.invalid:", "aserializer.fields.base import BaseSerializerField, SerializerFieldValueError from aserializer.fields import validators as v class BaseDatetimeField(BaseSerializerField): date_formats", "None if isinstance(self.value, date): return self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value", "is required.', 'invalid': 'Invalid time value.', } def _is_instance(self, value): return isinstance(value, time)", "False def strptime(self, value, formats): for f in formats: try: result = datetime.strptime(value,", "if isinstance(self.value, datetime): return self.value self.value = self.strptime(self.value, self._date_formats) return self.value class DateField(BaseDatetimeField):", "return None if 
isinstance(self.value, date): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value", "elif isinstance(value, py2to3.string): self.value = self.strptime(value, self._date_formats) self.invalid = self.value is None def", "} def _is_instance(self, value): return isinstance(value, date) def set_value(self, value): if self._is_instance(value): self.value", "return None if isinstance(self.value, date): return self.value _value = self.strptime(self.value, self._date_formats) if _value:", "{ 'required': 'This field is required.', 'invalid': 'Invalid date value.', } def __init__(self,", "import py2to3 from aserializer.fields.base import BaseSerializerField, SerializerFieldValueError from aserializer.fields import validators as v", "is required.', 'invalid': 'Invalid date value.', } def __init__(self, formats=None, serialize_to=None, *args, **kwargs):", "raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value,", "= self.strptime(self.value, self._date_formats) if _value is None and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def", "not None: self.value = _value.time() self.invalid = _value is None def _to_native(self): if", "= f except (ValueError, TypeError): continue else: return result return None def strftime(self,", "value.date() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value is not None:", "import validators as v class BaseDatetimeField(BaseSerializerField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages = {", "not None: self.value = _value.date() self.invalid = _value is None def _to_native(self): if", "SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def set_value(self, value): if self._is_instance(value): self.value = value elif 
isinstance(value, py2to3.string):", "class DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d', ] error_messages = { 'required': 'This field is", "self.invalid = self.value is None def _is_instance(self, value): return False def strptime(self, value,", "error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid date value.', }", "self._date_formats) if _value is None and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def set_value(self, value):", "in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self):", "-*- coding: utf-8 -*- from datetime import datetime, date, time from aserializer.utils import", "= _value is None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if", "py2to3.string): _value = self.strptime(value, self._date_formats) if _value is not None: self.value = _value.date()", "time) def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value", "return isinstance(value, time) def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value,", "py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return", "_value = self.strptime(self.value, self._date_formats) if _value is None and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names)", "= ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages = { 'required': 'This field is required.', 'invalid':", "} def _is_instance(self, value): return isinstance(value, time) def set_value(self, value): if self._is_instance(value): self.value", "SerializerFieldValueError from aserializer.fields import validators as v class 
BaseDatetimeField(BaseSerializerField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ]", "def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value =", "= ['%Y-%m-%d', ] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid", "if self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value = value.date() elif isinstance(value,", "required.', 'invalid': 'Invalid date time value.', } def _is_instance(self, value): return isinstance(value, datetime)", "= _value.time() self.invalid = _value is None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES:", "field_names=self.names) def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, py2to3.string): self.value", "result return None def strftime(self, value): if self._serialize_format: return value.strftime(self._serialize_format) elif self._current_format: return", "isinstance(self.value, datetime): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return", "isinstance(self.value, datetime): return self.value self.value = self.strptime(self.value, self._date_formats) return self.value class DateField(BaseDatetimeField): date_formats", "is None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time):", "None if isinstance(self.value, time): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in", "datetime import datetime, date, time from aserializer.utils import py2to3 from aserializer.fields.base import BaseSerializerField,", "field is required.', 'invalid': 'Invalid date value.', } def __init__(self, formats=None, serialize_to=None, *args,", "value): return isinstance(value, date) def set_value(self, value): if self._is_instance(value): self.value = value elif", 
"self.value = value.time() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value is", "serialize_to self._current_format = None self.invalid = False def validate(self): if self.ignore: return if", "date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages = { 'required': 'This field is required.',", "f) self._current_format = f except (ValueError, TypeError): continue else: return result return None", "BaseSerializerField, SerializerFieldValueError from aserializer.fields import validators as v class BaseDatetimeField(BaseSerializerField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f',", "return False def strptime(self, value, formats): for f in formats: try: result =", "'Invalid time value.', } def _is_instance(self, value): return isinstance(value, time) def set_value(self, value):", "= ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid", "date) def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value", "self.value is None def _is_instance(self, value): return False def strptime(self, value, formats): for", "elif self._current_format: return value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f',", "if self.ignore: return if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value in v.VALIDATORS_EMPTY_VALUES and", "= _value.date() return self.value class TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S', ] error_messages = {", "= value.date() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value is not", "if _value is None and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], 
field_names=self.names) def set_value(self, value): if", "= self.strptime(value, self._date_formats) if _value is not None: self.value = _value.date() self.invalid =", "_to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.value _value", "return self.value self.value = self.strptime(self.value, self._date_formats) return self.value class DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d',", "= value elif isinstance(value, py2to3.string): self.value = self.strptime(value, self._date_formats) self.invalid = self.value is", "self.value = _value.date() return self.value class TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S', ] error_messages =", "f except (ValueError, TypeError): continue else: return result return None def strftime(self, value):", "self.invalid = False def validate(self): if self.ignore: return if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names)", "if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.strftime(self.value) return py2to3._unicode(self.value)", "strptime(self, value, formats): for f in formats: try: result = datetime.strptime(value, f) self._current_format", "v class BaseDatetimeField(BaseSerializerField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages = { 'required': 'This field", "is required.', 'invalid': 'Invalid date value.', } def _is_instance(self, value): return isinstance(value, date)", "{ 'required': 'This field is required.', 'invalid': 'Invalid time value.', } def _is_instance(self,", "self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value = value.date() elif isinstance(value, py2to3.string):", "from aserializer.fields.base import BaseSerializerField, SerializerFieldValueError from aserializer.fields import validators as v class BaseDatetimeField(BaseSerializerField):", "self._is_instance(value): 
self.value = value elif isinstance(value, datetime): self.value = value.time() elif isinstance(value, py2to3.string):", "None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return", "self.strptime(self.value, self._date_formats) return self.value class DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d', ] error_messages = {", "None if isinstance(self.value, date): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in", "isinstance(value, time) def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, datetime):", "] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid date value.',", "def validate(self): if self.ignore: return if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value in", "return None if isinstance(self.value, datetime): return self.value self.value = self.strptime(self.value, self._date_formats) return self.value", "return _value = self.strptime(self.value, self._date_formats) if _value is None and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'],", "self.value = value.date() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value is", "self.strptime(value, self._date_formats) if _value is not None: self.value = _value.date() self.invalid = _value", "isinstance(value, datetime): self.value = value.date() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if", "for f in formats: try: result = datetime.strptime(value, f) self._current_format = f except", "isinstance(value, date) def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, datetime):", "py2to3.string): self.value = self.strptime(value, self._date_formats) self.invalid = self.value is None def 
_is_instance(self, value):", "] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid time value.',", "self.strptime(value, self._date_formats) self.invalid = self.value is None def _is_instance(self, value): return False def", "None: self.value = _value.date() self.invalid = _value is None def _to_native(self): if self.value", "elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value is not None: self.value", "in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self):", "**kwargs) self._date_formats = formats or self.date_formats self._serialize_format = serialize_to self._current_format = None self.invalid", "'Invalid date time value.', } def _is_instance(self, value): return isinstance(value, datetime) def _to_native(self):", "raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value): return _value = self.strptime(self.value, self._date_formats) if _value is", "-*- from datetime import datetime, date, time from aserializer.utils import py2to3 from aserializer.fields.base", "isinstance(self.value, date): return self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value = _value.date()", "self._date_formats) if _value is not None: self.value = _value.time() self.invalid = _value is", "= _value.date() self.invalid = _value is None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES:", "= value elif isinstance(value, datetime): self.value = value.date() elif isinstance(value, py2to3.string): _value =", "self._date_formats) if _value is not None: self.value = _value.date() self.invalid = _value is", "self.date_formats self._serialize_format = serialize_to self._current_format = None self.invalid = False def validate(self): if", "f in formats: try: result = datetime.strptime(value, f) 
self._current_format = f except (ValueError,", "isinstance(value, py2to3.string): self.value = self.strptime(value, self._date_formats) self.invalid = self.value is None def _is_instance(self,", "self._serialize_format: return value.strftime(self._serialize_format) elif self._current_format: return value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats", "if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.value self.value =", "time value.', } def _is_instance(self, value): return isinstance(value, datetime) def _to_native(self): if self.value", "def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.value", "raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value in v.VALIDATORS_EMPTY_VALUES and (self.required or self.identity): raise SerializerFieldValueError(self._error_messages['required'],", "self.value = value elif isinstance(value, py2to3.string): self.value = self.strptime(value, self._date_formats) self.invalid = self.value", "value.strftime(self._serialize_format) elif self._current_format: return value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z',", "'required': 'This field is required.', 'invalid': 'Invalid time value.', } def _is_instance(self, value):", "self._current_format = None self.invalid = False def validate(self): if self.ignore: return if self.invalid:", "isinstance(self.value, time): return self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value = _value.time()", "or self.identity): raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value): return _value = self.strptime(self.value, 
self._date_formats) if", "self.value = value elif isinstance(value, datetime): self.value = value.time() elif isinstance(value, py2to3.string): _value", "if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.value _value =", "False def validate(self): if self.ignore: return if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value", "self.value = self.strptime(value, self._date_formats) self.invalid = self.value is None def _is_instance(self, value): return", "v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.value _value = self.strptime(self.value, self._date_formats) if", "_is_instance(self, value): return isinstance(value, time) def set_value(self, value): if self._is_instance(value): self.value = value", "_is_instance(self, value): return isinstance(value, date) def set_value(self, value): if self._is_instance(value): self.value = value", "def _is_instance(self, value): return isinstance(value, datetime) def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return", "'This field is required.', 'invalid': 'Invalid date value.', } def _is_instance(self, value): return", "is not None: self.value = _value.time() self.invalid = _value is None def _to_native(self):", "'invalid': 'Invalid date value.', } def _is_instance(self, value): return isinstance(value, date) def set_value(self,", "return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date):", "self.value = self.strptime(self.value, self._date_formats) return self.value class DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d', ] error_messages", "} def __init__(self, formats=None, serialize_to=None, *args, **kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats = formats", "v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, 
datetime): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if", "_to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.strftime(self.value) return", "None if isinstance(self.value, datetime): return self.value self.value = self.strptime(self.value, self._date_formats) return self.value class", "self.ignore: return if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value in v.VALIDATORS_EMPTY_VALUES and (self.required", "datetime): self.value = value.time() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value", "in v.VALIDATORS_EMPTY_VALUES and (self.required or self.identity): raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value): return _value", "return isinstance(value, date) def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value,", "_value.date() self.invalid = _value is None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return", "SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value in v.VALIDATORS_EMPTY_VALUES and (self.required or self.identity): raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names)", "__init__(self, formats=None, serialize_to=None, *args, **kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats = formats or self.date_formats", "set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value = value.date()", "is None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date):", "time): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in 
v.VALIDATORS_EMPTY_VALUES: return None", "datetime): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None", "py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages = { 'required': 'This", "class BaseDatetimeField(BaseSerializerField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages = { 'required': 'This field is", "# -*- coding: utf-8 -*- from datetime import datetime, date, time from aserializer.utils", "py2to3.string): _value = self.strptime(value, self._date_formats) if _value is not None: self.value = _value.time()", "is None and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def set_value(self, value): if self._is_instance(value): self.value", "aserializer.fields import validators as v class BaseDatetimeField(BaseSerializerField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages =", "required.', 'invalid': 'Invalid time value.', } def _is_instance(self, value): return isinstance(value, time) def", "{ 'required': 'This field is required.', 'invalid': 'Invalid date time value.', } def", "self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.value self.value = self.strptime(self.value,", "def _is_instance(self, value): return False def strptime(self, value, formats): for f in formats:", "return if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value in v.VALIDATORS_EMPTY_VALUES and (self.required or", "*args, **kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats = formats or self.date_formats self._serialize_format = serialize_to", "self._current_format = f except (ValueError, TypeError): continue else: return result return None def", 
"return None if isinstance(self.value, datetime): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value", "try: result = datetime.strptime(value, f) self._current_format = f except (ValueError, TypeError): continue else:", "strftime(self, value): if self._serialize_format: return value.strftime(self._serialize_format) elif self._current_format: return value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat())", "= self.strptime(self.value, self._date_formats) if _value: self.value = _value.date() return self.value class TimeField(BaseDatetimeField): date_formats", "_value: self.value = _value.date() return self.value class TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S', ] error_messages", "_to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.strftime(self.value) return", "_value = self.strptime(value, self._date_formats) if _value is not None: self.value = _value.date() self.invalid", "self.value class TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S', ] error_messages = { 'required': 'This field", "if self._serialize_format: return value.strftime(self._serialize_format) elif self._current_format: return value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField):", "None def strftime(self, value): if self._serialize_format: return value.strftime(self._serialize_format) elif self._current_format: return value.strftime(self._current_format) else:", "return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if", "= { 'required': 'This field is required.', 'invalid': 'Invalid time value.', } def", "= None self.invalid = False def validate(self): if self.ignore: return if self.invalid: raise", "_value.date() return self.value class 
TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S', ] error_messages = { 'required':", "None self.invalid = False def validate(self): if self.ignore: return if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'],", "value.', } def _is_instance(self, value): return isinstance(value, date) def set_value(self, value): if self._is_instance(value):", "return None def strftime(self, value): if self._serialize_format: return value.strftime(self._serialize_format) elif self._current_format: return value.strftime(self._current_format)", "TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S', ] error_messages = { 'required': 'This field is required.',", "'This field is required.', 'invalid': 'Invalid time value.', } def _is_instance(self, value): return", "value.', } def __init__(self, formats=None, serialize_to=None, *args, **kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats =", "self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.value _value = self.strptime(self.value,", "self._date_formats) if _value: self.value = _value.date() return self.value class TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S',", "v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if", "(self.required or self.identity): raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value): return _value = self.strptime(self.value, self._date_formats)", "isinstance(self.value, date): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return", "in formats: try: result = datetime.strptime(value, f) self._current_format = f except (ValueError, TypeError):", "and (self.required or self.identity): raise 
SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value): return _value = self.strptime(self.value,", "_value is None and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def set_value(self, value): if self._is_instance(value):", "def _is_instance(self, value): return isinstance(value, time) def set_value(self, value): if self._is_instance(value): self.value =", "def set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, py2to3.string): self.value =", "formats=None, serialize_to=None, *args, **kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats = formats or self.date_formats self._serialize_format", "datetime, date, time from aserializer.utils import py2to3 from aserializer.fields.base import BaseSerializerField, SerializerFieldValueError from", "value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages =", "if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.strftime(self.value) return py2to3._unicode(self.value)", "= datetime.strptime(value, f) self._current_format = f except (ValueError, TypeError): continue else: return result", "isinstance(value, datetime): self.value = value.time() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if", "def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.strftime(self.value)", "aserializer.utils import py2to3 from aserializer.fields.base import BaseSerializerField, SerializerFieldValueError from aserializer.fields import validators as", "datetime): return self.value self.value = self.strptime(self.value, 
self._date_formats) return self.value class DateField(BaseDatetimeField): date_formats =", "_is_instance(self, value): return False def strptime(self, value, formats): for f in formats: try:", "date value.', } def __init__(self, formats=None, serialize_to=None, *args, **kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats", "value, formats): for f in formats: try: result = datetime.strptime(value, f) self._current_format =", "class DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages = { 'required': 'This field", "time from aserializer.utils import py2to3 from aserializer.fields.base import BaseSerializerField, SerializerFieldValueError from aserializer.fields import", "value): if self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value = value.time() elif", "_to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.strftime(self.value) return", "time value.', } def _is_instance(self, value): return isinstance(value, time) def set_value(self, value): if", "None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return", "self._current_format: return value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S']", "if isinstance(self.value, datetime): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES:", "serialize_to=None, *args, **kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats = formats or self.date_formats self._serialize_format =", "self.invalid = _value is None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return 
None", "value): return isinstance(value, datetime) def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if", "return None if isinstance(self.value, time): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value", "def __init__(self, formats=None, serialize_to=None, *args, **kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats = formats or", "'This field is required.', 'invalid': 'Invalid date time value.', } def _is_instance(self, value):", "self._date_formats) return self.value class DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d', ] error_messages = { 'required':", "'This field is required.', 'invalid': 'Invalid date value.', } def __init__(self, formats=None, serialize_to=None,", "_to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.value _value", "else: return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages = {", "return isinstance(value, datetime) def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value,", "date_formats = ['%Y-%m-%d', ] error_messages = { 'required': 'This field is required.', 'invalid':", "v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.value _value = self.strptime(self.value, self._date_formats) if", "self).__init__(*args, **kwargs) self._date_formats = formats or self.date_formats self._serialize_format = serialize_to self._current_format = None", "self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def set_value(self, value): if self._is_instance(value): self.value = value elif", "_value is not None: self.value = _value.time() self.invalid = _value is None def", "datetime) def _to_native(self): if 
self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return", "return self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value = _value.date() return self.value", "date_formats = ['%H:%M:%S', ] error_messages = { 'required': 'This field is required.', 'invalid':", "in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.value _value = self.strptime(self.value, self._date_formats)", "None: self.value = _value.time() self.invalid = _value is None def _to_native(self): if self.value", "return self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value = _value.time() return self.value", "_value = self.strptime(value, self._date_formats) if _value is not None: self.value = _value.time() self.invalid", "self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value = _value.date() return self.value class", "from aserializer.utils import py2to3 from aserializer.fields.base import BaseSerializerField, SerializerFieldValueError from aserializer.fields import validators", "_value = self.strptime(self.value, self._date_formats) if _value: self.value = _value.date() return self.value class TimeField(BaseDatetimeField):", "from datetime import datetime, date, time from aserializer.utils import py2to3 from aserializer.fields.base import", "isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value is not None: self.value =", "_value.time() self.invalid = _value is None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return", "time): return self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value = _value.time() return", "formats or self.date_formats self._serialize_format = serialize_to self._current_format = None self.invalid = False def", "<reponame>orderbird/aserializer<filename>aserializer/fields/time_fields.py<gh_stars>0 # -*- coding: utf-8 
-*- from datetime import datetime, date, time from", "import BaseSerializerField, SerializerFieldValueError from aserializer.fields import validators as v class BaseDatetimeField(BaseSerializerField): date_formats =", "if isinstance(self.value, date): return self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value =", "self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.strftime(self.value) return py2to3._unicode(self.value) def", "if isinstance(self.value, date): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES:", "field_names=self.names) if self._is_instance(self.value): return _value = self.strptime(self.value, self._date_formats) if _value is None and", "self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value in v.VALIDATORS_EMPTY_VALUES and (self.required or self.identity): raise", "if _value is not None: self.value = _value.date() self.invalid = _value is None", "'%Y-%m-%dT%H:%M:%S'] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid date time", "datetime): self.value = value.date() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value", "def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.strftime(self.value)", "def _is_instance(self, value): return isinstance(value, date) def set_value(self, value): if self._is_instance(value): self.value =", "self._serialize_format = serialize_to self._current_format = None self.invalid = False def validate(self): if self.ignore:", "except (ValueError, TypeError): continue else: return result return None def strftime(self, value): if", "None if isinstance(self.value, time): return self.value _value = self.strptime(self.value, self._date_formats) if _value: 
self.value", "if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.strftime(self.value) return py2to3._unicode(self.value)", "DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages = { 'required': 'This field is", "def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time): return self.value", "or self.date_formats self._serialize_format = serialize_to self._current_format = None self.invalid = False def validate(self):", "date value.', } def _is_instance(self, value): return isinstance(value, date) def set_value(self, value): if", "= self.strptime(value, self._date_formats) self.invalid = self.value is None def _is_instance(self, value): return False", "continue else: return result return None def strftime(self, value): if self._serialize_format: return value.strftime(self._serialize_format)", "return result return None def strftime(self, value): if self._serialize_format: return value.strftime(self._serialize_format) elif self._current_format:", "= { 'required': 'This field is required.', 'invalid': 'Invalid date time value.', }", "v.VALIDATORS_EMPTY_VALUES and (self.required or self.identity): raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value): return _value =", "formats): for f in formats: try: result = datetime.strptime(value, f) self._current_format = f", "self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value,", "'required': 'This field is required.', 'invalid': 'Invalid date value.', } def _is_instance(self, value):", "= { 'required': 'This field is required.', 'invalid': 'Invalid date value.', } def", "if self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value = 
value.time() elif isinstance(value,", "'required': 'This field is required.', 'invalid': 'Invalid date value.', } def __init__(self, formats=None,", "['%Y-%m-%d', ] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid date", "return None if isinstance(self.value, time): return self.value _value = self.strptime(self.value, self._date_formats) if _value:", "'%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid date", "if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value in v.VALIDATORS_EMPTY_VALUES and (self.required or self.identity):", "['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid", "self.value self.value = self.strptime(self.value, self._date_formats) return self.value class DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d', ]", "= self.strptime(self.value, self._date_formats) return self.value class DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d', ] error_messages =", "if _value: self.value = _value.date() return self.value class TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S', ]", "utf-8 -*- from datetime import datetime, date, time from aserializer.utils import py2to3 from", "field is required.', 'invalid': 'Invalid time value.', } def _is_instance(self, value): return isinstance(value,", "field_names=self.names) if self.value in v.VALIDATORS_EMPTY_VALUES and (self.required or self.identity): raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if", "date): return self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value = _value.date() return", "isinstance(self.value, time): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: 
return", "= ['%H:%M:%S', ] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid", "self.value in v.VALIDATORS_EMPTY_VALUES and (self.required or self.identity): raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value): return", "if self.value in v.VALIDATORS_EMPTY_VALUES and (self.required or self.identity): raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value):", "self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.strftime(self.value) return py2to3._unicode(self.value) def", "value.time() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value is not None:", "self._date_formats) self.invalid = self.value is None def _is_instance(self, value): return False def strptime(self,", "field is required.', 'invalid': 'Invalid date time value.', } def _is_instance(self, value): return", "self._is_instance(self.value): return _value = self.strptime(self.value, self._date_formats) if _value is None and self.invalid: raise", "required.', 'invalid': 'Invalid date value.', } def _is_instance(self, value): return isinstance(value, date) def", "'invalid': 'Invalid date time value.', } def _is_instance(self, value): return isinstance(value, datetime) def", "v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if", "from aserializer.fields import validators as v class BaseDatetimeField(BaseSerializerField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages", "= value.time() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats) if _value is not", "isinstance(value, datetime) def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, 
datetime):", "BaseDatetimeField(BaseSerializerField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages = { 'required': 'This field is required.',", "= False def validate(self): if self.ignore: return if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if", "else: return result return None def strftime(self, value): if self._serialize_format: return value.strftime(self._serialize_format) elif", "is required.', 'invalid': 'Invalid date time value.', } def _is_instance(self, value): return isinstance(value,", "**kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats = formats or self.date_formats self._serialize_format = serialize_to self._current_format", "(ValueError, TypeError): continue else: return result return None def strftime(self, value): if self._serialize_format:", "if self._is_instance(value): self.value = value elif isinstance(value, py2to3.string): self.value = self.strptime(value, self._date_formats) self.invalid", "is not None: self.value = _value.date() self.invalid = _value is None def _to_native(self):", "required.', 'invalid': 'Invalid date value.', } def __init__(self, formats=None, serialize_to=None, *args, **kwargs): super(BaseDatetimeField,", "_value is not None: self.value = _value.date() self.invalid = _value is None def", "py2to3 from aserializer.fields.base import BaseSerializerField, SerializerFieldValueError from aserializer.fields import validators as v class", "error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid time value.', }", "is None def _is_instance(self, value): return False def strptime(self, value, formats): for f", "DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d', ] error_messages = { 'required': 'This field is required.',", "_value is None def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value,", "import datetime, date, time from aserializer.utils 
import py2to3 from aserializer.fields.base import BaseSerializerField, SerializerFieldValueError", "self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.strftime(self.value) return py2to3._unicode(self.value) def", "value elif isinstance(value, py2to3.string): self.value = self.strptime(value, self._date_formats) self.invalid = self.value is None", "def strptime(self, value, formats): for f in formats: try: result = datetime.strptime(value, f)", "elif isinstance(value, datetime): self.value = value.date() elif isinstance(value, py2to3.string): _value = self.strptime(value, self._date_formats)", "_to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.value self.value", "SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value): return _value = self.strptime(self.value, self._date_formats) if _value is None", "set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, py2to3.string): self.value = self.strptime(value,", "formats: try: result = datetime.strptime(value, f) self._current_format = f except (ValueError, TypeError): continue", "return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, time):", "date): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None", "None if isinstance(self.value, datetime): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in", "value): if self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value = value.date() elif", "return self.value class TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S', ] error_messages = { 'required': 'This", "def _to_python(self): if 
self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.value", "value): if self._serialize_format: return value.strftime(self._serialize_format) elif self._current_format: return value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat()) class", "= serialize_to self._current_format = None self.invalid = False def validate(self): if self.ignore: return", "value elif isinstance(value, datetime): self.value = value.date() elif isinstance(value, py2to3.string): _value = self.strptime(value,", "in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.value self.value = self.strptime(self.value, self._date_formats)", "value elif isinstance(value, datetime): self.value = value.time() elif isinstance(value, py2to3.string): _value = self.strptime(value,", "self.strptime(value, self._date_formats) if _value is not None: self.value = _value.time() self.invalid = _value", "return value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages", "return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'] error_messages = { 'required':", "self.value class DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d', ] error_messages = { 'required': 'This field", "{ 'required': 'This field is required.', 'invalid': 'Invalid date value.', } def _is_instance(self,", "if isinstance(self.value, time): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES:", "self.strptime(self.value, self._date_formats) if _value: self.value = _value.date() return self.value class TimeField(BaseDatetimeField): date_formats =", 
"py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return", "error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid date time value.',", "'required': 'This field is required.', 'invalid': 'Invalid date time value.', } def _is_instance(self,", "py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return", "value): return False def strptime(self, value, formats): for f in formats: try: result", "date time value.', } def _is_instance(self, value): return isinstance(value, datetime) def _to_native(self): if", "self.identity): raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names) if self._is_instance(self.value): return _value = self.strptime(self.value, self._date_formats) if _value", "field is required.', 'invalid': 'Invalid date value.', } def _is_instance(self, value): return isinstance(value,", "= formats or self.date_formats self._serialize_format = serialize_to self._current_format = None self.invalid = False", "value.', } def _is_instance(self, value): return isinstance(value, datetime) def _to_native(self): if self.value in", "self.strptime(self.value, self._date_formats) if _value is None and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def set_value(self,", "self._date_formats = formats or self.date_formats self._serialize_format = serialize_to self._current_format = None self.invalid =", "class TimeField(BaseDatetimeField): date_formats = ['%H:%M:%S', ] error_messages = { 'required': 'This field is", "'invalid': 'Invalid date value.', } def __init__(self, formats=None, serialize_to=None, *args, **kwargs): super(BaseDatetimeField, self).__init__(*args,", "['%H:%M:%S', ] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid time", 
"datetime.strptime(value, f) self._current_format = f except (ValueError, TypeError): continue else: return result return", "if isinstance(self.value, time): return self.value _value = self.strptime(self.value, self._date_formats) if _value: self.value =", "if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.value _value =", "if _value is not None: self.value = _value.time() self.invalid = _value is None", "super(BaseDatetimeField, self).__init__(*args, **kwargs) self._date_formats = formats or self.date_formats self._serialize_format = serialize_to self._current_format =", "= self.value is None def _is_instance(self, value): return False def strptime(self, value, formats):", "None def _is_instance(self, value): return False def strptime(self, value, formats): for f in", "'Invalid date value.', } def __init__(self, formats=None, serialize_to=None, *args, **kwargs): super(BaseDatetimeField, self).__init__(*args, **kwargs)", "= self.strptime(value, self._date_formats) if _value is not None: self.value = _value.time() self.invalid =", "date, time from aserializer.utils import py2to3 from aserializer.fields.base import BaseSerializerField, SerializerFieldValueError from aserializer.fields", "def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.strftime(self.value)", "'invalid': 'Invalid time value.', } def _is_instance(self, value): return isinstance(value, time) def set_value(self,", "_is_instance(self, value): return isinstance(value, datetime) def _to_native(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None", "value): return isinstance(value, time) def set_value(self, value): if self._is_instance(value): self.value = value elif", "'Invalid date value.', } def _is_instance(self, value): return isinstance(value, date) def set_value(self, value):", "value): if self._is_instance(value): self.value = value elif isinstance(value, 
py2to3.string): self.value = self.strptime(value, self._date_formats)", "self.value = _value.time() self.invalid = _value is None def _to_native(self): if self.value in", "validate(self): if self.ignore: return if self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) if self.value in v.VALIDATORS_EMPTY_VALUES", "self.value = _value.date() self.invalid = _value is None def _to_native(self): if self.value in", "self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, date): return self.value _value = self.strptime(self.value,", "self._is_instance(value): self.value = value elif isinstance(value, py2to3.string): self.value = self.strptime(value, self._date_formats) self.invalid =", "return py2to3._unicode(self.value) def _to_python(self): if self.value in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime):", "date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages = { 'required': 'This field is required.', 'invalid':", "in v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.strftime(self.value) return py2to3._unicode(self.value) def _to_python(self):", "set_value(self, value): if self._is_instance(value): self.value = value elif isinstance(value, datetime): self.value = value.time()", "None and self.invalid: raise SerializerFieldValueError(self._error_messages['invalid'], field_names=self.names) def set_value(self, value): if self._is_instance(value): self.value =", "def strftime(self, value): if self._serialize_format: return value.strftime(self._serialize_format) elif self._current_format: return value.strftime(self._current_format) else: return", "return self.value class DateField(BaseDatetimeField): date_formats = ['%Y-%m-%d', ] error_messages = { 'required': 'This", "result = datetime.strptime(value, f) self._current_format = f except (ValueError, TypeError): continue else: return", "as v class BaseDatetimeField(BaseSerializerField): 
date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages = { 'required': 'This", "validators as v class BaseDatetimeField(BaseSerializerField): date_formats = ['%Y-%m-%dT%H:%M:%S.%f', ] error_messages = { 'required':", "v.VALIDATORS_EMPTY_VALUES: return None if isinstance(self.value, datetime): return self.value self.value = self.strptime(self.value, self._date_formats) return", "['%Y-%m-%dT%H:%M:%S.%f', ] error_messages = { 'required': 'This field is required.', 'invalid': 'Invalid date", "return value.strftime(self._serialize_format) elif self._current_format: return value.strftime(self._current_format) else: return py2to3._unicode(value.isoformat()) class DatetimeField(BaseDatetimeField): date_formats =", "coding: utf-8 -*- from datetime import datetime, date, time from aserializer.utils import py2to3" ]
[ "Section(data[u]) if 'peon' in unit['type'].lower() and u != 'e000' and u != 'udr'", "get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success') except Exception as e: sg.popup(str(e),title='Error') filter_listbox(data, window, values, '', options)", "from ..model.search import SearchableList from ..view import newproduction from . import get_string_unit, RACES,", "grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True: event, values = window.read() if event is None: break", "unit = Section(data[u]) if 'peon' in unit['type'].lower() and u != 'e000' and u", "if 'peon' in unit['type'].lower() and u != 'e000' and u != 'udr' and", "not in unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window = sg.Window('New Production', newproduction.get_layout(), default_element_size=(40, 1),", "u in data: unit = Section(data[u]) if 'peon' in unit['type'].lower() and u !=", "u != 'udr' and 'A00J' not in unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window =", "= SearchableList() for u in data: unit = Section(data[u]) if 'peon' in unit['type'].lower()", "1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True: event, values = window.read() if event is None:", "..model.objectdata import ObjectData from ..model.search import SearchableList from ..view import newproduction from .", "filter_listbox from myconfigparser import Section def open_window(data): options = SearchableList() for u in", "SearchableList() for u in data: unit = Section(data[u]) if 'peon' in unit['type'].lower() and", "SearchableList from ..view import newproduction from . 
import get_string_unit, RACES, filter_listbox from myconfigparser", "options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window = sg.Window('New Production', newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while", "from . import get_string_unit, RACES, filter_listbox from myconfigparser import Section def open_window(data): options", "window = sg.Window('New Production', newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True: event, values", "is None: break elif event == 'Submit': try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success') except", "from myconfigparser import Section def open_window(data): options = SearchableList() for u in data:", "newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True: event, values = window.read() if event", "unit['type'].lower() and u != 'e000' and u != 'udr' and 'A00J' not in", "u != 'e000' and u != 'udr' and 'A00J' not in unit['abilList']: options.append('{name}", "Section def open_window(data): options = SearchableList() for u in data: unit = Section(data[u])", "= window.read() if event is None: break elif event == 'Submit': try: ObjectData(data).create_production(values['Name'],", "None: break elif event == 'Submit': try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success') except Exception", "window.find_element('Options').Update(sorted(options)) while True: event, values = window.read() if event is None: break elif", "import SearchableList from ..view import newproduction from . 
import get_string_unit, RACES, filter_listbox from", "'A00J' not in unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window = sg.Window('New Production', newproduction.get_layout(), default_element_size=(40,", "try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success') except Exception as e: sg.popup(str(e),title='Error') filter_listbox(data, window, values,", "import ObjectData from ..model.search import SearchableList from ..view import newproduction from . import", "from ..view import newproduction from . import get_string_unit, RACES, filter_listbox from myconfigparser import", "[{code}]'.format(code=u, name=unit['Name'][1:-1])) window = sg.Window('New Production', newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True:", "unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window = sg.Window('New Production', newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options))", "event is None: break elif event == 'Submit': try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success')", "in unit['type'].lower() and u != 'e000' and u != 'udr' and 'A00J' not", "from ..model.objectdata import ObjectData from ..model.search import SearchableList from ..view import newproduction from", "== 'Submit': try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success') except Exception as e: sg.popup(str(e),title='Error') filter_listbox(data,", "'e000' and u != 'udr' and 'A00J' not in unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1]))", 
"'peon' in unit['type'].lower() and u != 'e000' and u != 'udr' and 'A00J'", "..view import newproduction from . import get_string_unit, RACES, filter_listbox from myconfigparser import Section", "newproduction from . import get_string_unit, RACES, filter_listbox from myconfigparser import Section def open_window(data):", "sg from ..model.objectdata import ObjectData from ..model.search import SearchableList from ..view import newproduction", "True: event, values = window.read() if event is None: break elif event ==", "open_window(data): options = SearchableList() for u in data: unit = Section(data[u]) if 'peon'", "if event is None: break elif event == 'Submit': try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']])", "for u in data: unit = Section(data[u]) if 'peon' in unit['type'].lower() and u", "ObjectData from ..model.search import SearchableList from ..view import newproduction from . import get_string_unit,", "window.read() if event is None: break elif event == 'Submit': try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]),", "sg.Window('New Production', newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True: event, values = window.read()", "break elif event == 'Submit': try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success') except Exception as", "import get_string_unit, RACES, filter_listbox from myconfigparser import Section def open_window(data): options = SearchableList()", "!= 'e000' and u != 'udr' and 'A00J' not in unit['abilList']: options.append('{name} [{code}]'.format(code=u,", "def open_window(data): options = SearchableList() for u in data: unit = Section(data[u]) if", "'Submit': try: ObjectData(data).create_production(values['Name'], 
get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success') except Exception as e: sg.popup(str(e),title='Error') filter_listbox(data, window,", "!= 'udr' and 'A00J' not in unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window = sg.Window('New", "default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True: event, values = window.read() if event is", "PySimpleGUI as sg from ..model.objectdata import ObjectData from ..model.search import SearchableList from ..view", "and u != 'e000' and u != 'udr' and 'A00J' not in unit['abilList']:", "and 'A00J' not in unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window = sg.Window('New Production', newproduction.get_layout(),", "options = SearchableList() for u in data: unit = Section(data[u]) if 'peon' in", "'udr' and 'A00J' not in unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window = sg.Window('New Production',", "in data: unit = Section(data[u]) if 'peon' in unit['type'].lower() and u != 'e000'", "get_string_unit, RACES, filter_listbox from myconfigparser import Section def open_window(data): options = SearchableList() for", "data: unit = Section(data[u]) if 'peon' in unit['type'].lower() and u != 'e000' and", "= sg.Window('New Production', newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True: event, values =", "RACES, filter_listbox from myconfigparser import Section def open_window(data): options = SearchableList() for u", "elif event == 'Submit': try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success') except Exception as e:", "..model.search import SearchableList from ..view import newproduction from . 
import get_string_unit, RACES, filter_listbox", "myconfigparser import Section def open_window(data): options = SearchableList() for u in data: unit", "Production', newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True: event, values = window.read() if", "while True: event, values = window.read() if event is None: break elif event", "import PySimpleGUI as sg from ..model.objectdata import ObjectData from ..model.search import SearchableList from", "ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) sg.popup('Success') except Exception as e: sg.popup(str(e),title='Error') filter_listbox(data, window, values, '',", ". import get_string_unit, RACES, filter_listbox from myconfigparser import Section def open_window(data): options =", "name=unit['Name'][1:-1])) window = sg.Window('New Production', newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize() window.find_element('Options').Update(sorted(options)) while True: event,", "event, values = window.read() if event is None: break elif event == 'Submit':", "in unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window = sg.Window('New Production', newproduction.get_layout(), default_element_size=(40, 1), grab_anywhere=False).Finalize()", "= Section(data[u]) if 'peon' in unit['type'].lower() and u != 'e000' and u !=", "as sg from ..model.objectdata import ObjectData from ..model.search import SearchableList from ..view import", "values = window.read() if event is None: break elif event == 'Submit': try:", "and u != 'udr' and 'A00J' not in unit['abilList']: options.append('{name} [{code}]'.format(code=u, name=unit['Name'][1:-1])) window", "event == 'Submit': try: ObjectData(data).create_production(values['Name'], get_string_unit(values['Options'][0]), RACES[values['ProdRace']]) 
sg.popup('Success') except Exception as e: sg.popup(str(e),title='Error')", "import newproduction from . import get_string_unit, RACES, filter_listbox from myconfigparser import Section def", "import Section def open_window(data): options = SearchableList() for u in data: unit =" ]
[ "has betrayed us, now have a grudge move = DEFECT self.grudge = True", "to life being good for x in range(0, 5): # no grudge everyone", "= False return move class TestPlayer(unittest.TestCase): def setUp(self): self.player = SoftGrudgerPlayer() def testPlay(self):", "grudge everyone gets along move = self.player.play(x * [COOPERATE], x * [COOPERATE], x", "self.assertEqual(self.player.grudge, False) # now test the grudge moves = 2 * [COOPERATE] +", "return \"Soft Grudge Player\" def play(self, myHistory, oppHistory1, oppHistory2): # are we cooperating", "move = self.player.play(x * [COOPERATE], x * [COOPERATE], x * [COOPERATE]) self.assertEqual(move, COOPERATE)", "''' from DallasPlayers.player import COOPERATE, Player, DEFECT import unittest class SoftGrudgerPlayer(Player): \"\"\" Soft", "SoftGrudgerPlayer() def testPlay(self): for x in range(0, 5): # no grudge everyone gets", "player using an soft grudge strategy ''' from DallasPlayers.player import COOPERATE, Player, DEFECT", "[DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True) # grudge it out while len(moves) >", "back to life being good for x in range(0, 5): # no grudge", "# someone has betrayed us, now have a grudge move = DEFECT self.grudge", "* [COOPERATE] + 3 * [DEFECT] # HERE COMES THE GRUDGE move =", "[COOPERATE] + 3 * [DEFECT] else: # still have a grudge move =", "life being good for x in range(0, 5): # no grudge everyone gets", "if not self.grudge: # lets work together move = COOPERATE if oppHistory1[-1] ==", "# now back to life being good for x in range(0, 5): #", "# HERE COMES THE GRUDGE move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge,", "* [COOPERATE], x * [COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False) # now test the", "being good for x in range(0, 5): # no grudge everyone gets along", "self.moves.pop() if len(self.moves) == 0: # can 
move on now, no more grudge", "oppHistory2): self.grudge = False self.moves = [] move = COOPERATE else: if not", "[COOPERATE], x * [COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False) # now test the grudge", "* [COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False) # now test the grudge moves =", "# are we cooperating if self.first_move(oppHistory1, oppHistory2): self.grudge = False self.moves = []", "self.first_move(oppHistory1, oppHistory2): self.grudge = False self.moves = [] move = COOPERATE else: if", "TestPlayer(unittest.TestCase): def setUp(self): self.player = SoftGrudgerPlayer() def testPlay(self): for x in range(0, 5):", "Player - Co-operates until the opponent defects, in such case opponent is punished", "not self.grudge: # lets work together move = COOPERATE if oppHistory1[-1] == DEFECT", "= 2 * [COOPERATE] + 3 * [DEFECT] # HERE COMES THE GRUDGE", "someone has betrayed us, now have a grudge move = DEFECT self.grudge =", "good for x in range(0, 5): # no grudge everyone gets along move", "COOPERATE else: if not self.grudge: # lets work together move = COOPERATE if", "* [DEFECT] # HERE COMES THE GRUDGE move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move,", "class SoftGrudgerPlayer(Player): \"\"\" Soft Grudger Player - Co-operates until the opponent defects, in", "False self.moves = [] def studentID(self): return \"20652186\" def agentName(self): return \"Soft Grudge", "grudge move = DEFECT self.grudge = True self.moves = 2 * [COOPERATE] +", "everyone gets along move = self.player.play(x * [COOPERATE], x * [COOPERATE], x *", "self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop()) # grudge should be gone self.assertEqual(self.player.grudge, False) #", "Player, DEFECT import unittest class SoftGrudgerPlayer(Player): \"\"\" Soft Grudger Player - Co-operates until", "return move class TestPlayer(unittest.TestCase): def 
setUp(self): self.player = SoftGrudgerPlayer() def testPlay(self): for x", "[COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True) # grudge it out while len(moves) > 0:", "self.grudge = False self.moves = [] def studentID(self): return \"20652186\" def agentName(self): return", "# no grudge everyone gets along move = self.player.play(x * [COOPERATE], x *", "soft grudge strategy ''' from DallasPlayers.player import COOPERATE, Player, DEFECT import unittest class", "move class TestPlayer(unittest.TestCase): def setUp(self): self.player = SoftGrudgerPlayer() def testPlay(self): for x in", "gets along move = self.player.play(x * [COOPERATE], x * [COOPERATE], x * [COOPERATE])", "have a grudge move = DEFECT self.grudge = True self.moves = 2 *", "# lets work together move = COOPERATE if oppHistory1[-1] == DEFECT or oppHistory2[-1]", "def testPlay(self): for x in range(0, 5): # no grudge everyone gets along", "now test the grudge moves = 2 * [COOPERATE] + 3 * [DEFECT]", "= self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop()) # grudge should be gone self.assertEqual(self.player.grudge, False)", "[COOPERATE], x * [COOPERATE], x * [COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False) # now", "from DallasPlayers.player import COOPERATE, Player, DEFECT import unittest class SoftGrudgerPlayer(Player): \"\"\" Soft Grudger", "<NAME> @id: 20652186 @class: CS686 @date: 2016-02-13 @note: contains a player using an", "the opponent defects, in such case opponent is punished with d,d,d,d,c,c. \"\"\" def", "grudge should be gone self.assertEqual(self.player.grudge, False) # now back to life being good", "class TestPlayer(unittest.TestCase): def setUp(self): self.player = SoftGrudgerPlayer() def testPlay(self): for x in range(0,", "us, now have a grudge move = DEFECT self.grudge = True self.moves =", "is punished with d,d,d,d,c,c. 
\"\"\" def __init__(self): self.grudge = False self.moves = []", "@date: 2016-02-13 @note: contains a player using an soft grudge strategy ''' from", "agentName(self): return \"Soft Grudge Player\" def play(self, myHistory, oppHistory1, oppHistory2): # are we", "0: # can move on now, no more grudge self.grudge = False return", "* [COOPERATE] + 3 * [DEFECT] else: # still have a grudge move", "case opponent is punished with d,d,d,d,c,c. \"\"\" def __init__(self): self.grudge = False self.moves", "cooperating if self.first_move(oppHistory1, oppHistory2): self.grudge = False self.moves = [] move = COOPERATE", "self.moves = 2 * [COOPERATE] + 3 * [DEFECT] else: # still have", "2016-02-13 @note: contains a player using an soft grudge strategy ''' from DallasPlayers.player", "range(0, 5): # no grudge everyone gets along move = self.player.play(x * [COOPERATE],", "x * [COOPERATE], x * [COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False) # now test", "move on now, no more grudge self.grudge = False return move class TestPlayer(unittest.TestCase):", "DEFECT import unittest class SoftGrudgerPlayer(Player): \"\"\" Soft Grudger Player - Co-operates until the", "oppHistory1[-1] == DEFECT or oppHistory2[-1] == DEFECT: # someone has betrayed us, now", "DEFECT: # someone has betrayed us, now have a grudge move = DEFECT", "* [COOPERATE], x * [COOPERATE], x * [COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False) #", "\"20652186\" def agentName(self): return \"Soft Grudge Player\" def play(self, myHistory, oppHistory1, oppHistory2): #", "Co-operates until the opponent defects, in such case opponent is punished with d,d,d,d,c,c.", "self.player.play(x * [COOPERATE], x * [COOPERATE], x * [COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False)", "# grudge should be gone self.assertEqual(self.player.grudge, False) # now back to life being", "oppHistory2): # are we cooperating if 
self.first_move(oppHistory1, oppHistory2): self.grudge = False self.moves =", "with d,d,d,d,c,c. \"\"\" def __init__(self): self.grudge = False self.moves = [] def studentID(self):", "@id: 20652186 @class: CS686 @date: 2016-02-13 @note: contains a player using an soft", "work together move = COOPERATE if oppHistory1[-1] == DEFECT or oppHistory2[-1] == DEFECT:", "Player\" def play(self, myHistory, oppHistory1, oppHistory2): # are we cooperating if self.first_move(oppHistory1, oppHistory2):", "together move = COOPERATE if oppHistory1[-1] == DEFECT or oppHistory2[-1] == DEFECT: #", "if oppHistory1[-1] == DEFECT or oppHistory2[-1] == DEFECT: # someone has betrayed us,", "import unittest class SoftGrudgerPlayer(Player): \"\"\" Soft Grudger Player - Co-operates until the opponent", "such case opponent is punished with d,d,d,d,c,c. \"\"\" def __init__(self): self.grudge = False", "False self.moves = [] move = COOPERATE else: if not self.grudge: # lets", "COOPERATE if oppHistory1[-1] == DEFECT or oppHistory2[-1] == DEFECT: # someone has betrayed", "now have a grudge move = DEFECT self.grudge = True self.moves = 2", "== DEFECT: # someone has betrayed us, now have a grudge move =", "[DEFECT] # HERE COMES THE GRUDGE move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT)", "len(moves) > 0: move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop()) # grudge should", "- Co-operates until the opponent defects, in such case opponent is punished with", "\"\"\" Soft Grudger Player - Co-operates until the opponent defects, in such case", "= COOPERATE if oppHistory1[-1] == DEFECT or oppHistory2[-1] == DEFECT: # someone has", "= True self.moves = 2 * [COOPERATE] + 3 * [DEFECT] else: #", "self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False) # now test the grudge moves = 2 *", "move = COOPERATE if oppHistory1[-1] == DEFECT or oppHistory2[-1] == DEFECT: # someone", "= SoftGrudgerPlayer() def 
testPlay(self): for x in range(0, 5): # no grudge everyone", "testPlay(self): for x in range(0, 5): # no grudge everyone gets along move", "are we cooperating if self.first_move(oppHistory1, oppHistory2): self.grudge = False self.moves = [] move", "\"Soft Grudge Player\" def play(self, myHistory, oppHistory1, oppHistory2): # are we cooperating if", "Grudger Player - Co-operates until the opponent defects, in such case opponent is", "in such case opponent is punished with d,d,d,d,c,c. \"\"\" def __init__(self): self.grudge =", "strategy ''' from DallasPlayers.player import COOPERATE, Player, DEFECT import unittest class SoftGrudgerPlayer(Player): \"\"\"", "0: move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop()) # grudge should be gone", "defects, in such case opponent is punished with d,d,d,d,c,c. \"\"\" def __init__(self): self.grudge", "= False self.moves = [] def studentID(self): return \"20652186\" def agentName(self): return \"Soft", "move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop()) # grudge should be gone self.assertEqual(self.player.grudge,", "= self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True) # grudge it out while", "betrayed us, now have a grudge move = DEFECT self.grudge = True self.moves", "grudge move = self.moves.pop() if len(self.moves) == 0: # can move on now,", "= [] def studentID(self): return \"20652186\" def agentName(self): return \"Soft Grudge Player\" def", "= COOPERATE else: if not self.grudge: # lets work together move = COOPERATE", "have a grudge move = self.moves.pop() if len(self.moves) == 0: # can move", "def studentID(self): return \"20652186\" def agentName(self): return \"Soft Grudge Player\" def play(self, myHistory,", "if self.first_move(oppHistory1, oppHistory2): self.grudge = False self.moves = [] move = COOPERATE else:", "no more grudge self.grudge = False return 
move class TestPlayer(unittest.TestCase): def setUp(self): self.player", "contains a player using an soft grudge strategy ''' from DallasPlayers.player import COOPERATE,", "a grudge move = DEFECT self.grudge = True self.moves = 2 * [COOPERATE]", "moves = 2 * [COOPERATE] + 3 * [DEFECT] # HERE COMES THE", "gone self.assertEqual(self.player.grudge, False) # now back to life being good for x in", "in range(0, 5): # no grudge everyone gets along move = self.player.play(x *", "== DEFECT or oppHistory2[-1] == DEFECT: # someone has betrayed us, now have", "False return move class TestPlayer(unittest.TestCase): def setUp(self): self.player = SoftGrudgerPlayer() def testPlay(self): for", "[COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False) # now test the grudge moves = 2", "Grudge Player\" def play(self, myHistory, oppHistory1, oppHistory2): # are we cooperating if self.first_move(oppHistory1,", "move = COOPERATE else: if not self.grudge: # lets work together move =", "* [DEFECT] else: # still have a grudge move = self.moves.pop() if len(self.moves)", "grudge strategy ''' from DallasPlayers.player import COOPERATE, Player, DEFECT import unittest class SoftGrudgerPlayer(Player):", "grudge it out while len(moves) > 0: move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move,", "self.moves = [] move = COOPERATE else: if not self.grudge: # lets work", "# grudge it out while len(moves) > 0: move = self.player.play([COOPERATE], [DEFECT], [COOPERATE])", "== 0: # can move on now, no more grudge self.grudge = False", "self.moves = [] def studentID(self): return \"20652186\" def agentName(self): return \"Soft Grudge Player\"", "lets work together move = COOPERATE if oppHistory1[-1] == DEFECT or oppHistory2[-1] ==", "or oppHistory2[-1] == DEFECT: # someone has betrayed us, now have a grudge", "move = DEFECT self.grudge = True self.moves = 2 * [COOPERATE] + 3", "the grudge moves = 2 * [COOPERATE] + 3 * [DEFECT] # HERE", 
"myHistory, oppHistory1, oppHistory2): # are we cooperating if self.first_move(oppHistory1, oppHistory2): self.grudge = False", "True self.moves = 2 * [COOPERATE] + 3 * [DEFECT] else: # still", "move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True) # grudge it out", "no grudge everyone gets along move = self.player.play(x * [COOPERATE], x * [COOPERATE],", "[DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop()) # grudge should be gone self.assertEqual(self.player.grudge, False) # now", "[COOPERATE]) self.assertEqual(move, moves.pop()) # grudge should be gone self.assertEqual(self.player.grudge, False) # now back", "else: if not self.grudge: # lets work together move = COOPERATE if oppHistory1[-1]", "should be gone self.assertEqual(self.player.grudge, False) # now back to life being good for", "''' @author: <NAME> @id: 20652186 @class: CS686 @date: 2016-02-13 @note: contains a player", "self.grudge: # lets work together move = COOPERATE if oppHistory1[-1] == DEFECT or", "# still have a grudge move = self.moves.pop() if len(self.moves) == 0: #", "opponent defects, in such case opponent is punished with d,d,d,d,c,c. 
\"\"\" def __init__(self):", "COMES THE GRUDGE move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True) #", "False) # now back to life being good for x in range(0, 5):", "3 * [DEFECT] # HERE COMES THE GRUDGE move = self.player.play([COOPERATE], [DEFECT], [COOPERATE])", "self.grudge = False self.moves = [] move = COOPERATE else: if not self.grudge:", "self.grudge = False return move class TestPlayer(unittest.TestCase): def setUp(self): self.player = SoftGrudgerPlayer() def", "> 0: move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop()) # grudge should be", "@author: <NAME> @id: 20652186 @class: CS686 @date: 2016-02-13 @note: contains a player using", "SoftGrudgerPlayer(Player): \"\"\" Soft Grudger Player - Co-operates until the opponent defects, in such", "for x in range(0, 5): # no grudge everyone gets along move =", "DEFECT) self.assertEqual(self.player.grudge, True) # grudge it out while len(moves) > 0: move =", "now back to life being good for x in range(0, 5): # no", "= 2 * [COOPERATE] + 3 * [DEFECT] else: # still have a", "if len(self.moves) == 0: # can move on now, no more grudge self.grudge", "False) # now test the grudge moves = 2 * [COOPERATE] + 3", "HERE COMES THE GRUDGE move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True)", "+ 3 * [DEFECT] # HERE COMES THE GRUDGE move = self.player.play([COOPERATE], [DEFECT],", "x in range(0, 5): # no grudge everyone gets along move = self.player.play(x", "def play(self, myHistory, oppHistory1, oppHistory2): # are we cooperating if self.first_move(oppHistory1, oppHistory2): self.grudge", "until the opponent defects, in such case opponent is punished with d,d,d,d,c,c. 
\"\"\"", "self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True) # grudge it out while len(moves)", "now, no more grudge self.grudge = False return move class TestPlayer(unittest.TestCase): def setUp(self):", "= False self.moves = [] move = COOPERATE else: if not self.grudge: #", "True) # grudge it out while len(moves) > 0: move = self.player.play([COOPERATE], [DEFECT],", "oppHistory1, oppHistory2): # are we cooperating if self.first_move(oppHistory1, oppHistory2): self.grudge = False self.moves", "oppHistory2[-1] == DEFECT: # someone has betrayed us, now have a grudge move", "self.assertEqual(self.player.grudge, True) # grudge it out while len(moves) > 0: move = self.player.play([COOPERATE],", "opponent is punished with d,d,d,d,c,c. \"\"\" def __init__(self): self.grudge = False self.moves =", "on now, no more grudge self.grudge = False return move class TestPlayer(unittest.TestCase): def", "[] def studentID(self): return \"20652186\" def agentName(self): return \"Soft Grudge Player\" def play(self,", "\"\"\" def __init__(self): self.grudge = False self.moves = [] def studentID(self): return \"20652186\"", "= [] move = COOPERATE else: if not self.grudge: # lets work together", "[DEFECT] else: # still have a grudge move = self.moves.pop() if len(self.moves) ==", "self.assertEqual(move, moves.pop()) # grudge should be gone self.assertEqual(self.player.grudge, False) # now back to", "import COOPERATE, Player, DEFECT import unittest class SoftGrudgerPlayer(Player): \"\"\" Soft Grudger Player -", "def setUp(self): self.player = SoftGrudgerPlayer() def testPlay(self): for x in range(0, 5): #", "__init__(self): self.grudge = False self.moves = [] def studentID(self): return \"20652186\" def agentName(self):", "we cooperating if self.first_move(oppHistory1, oppHistory2): self.grudge = False self.moves = [] move =", "2 * [COOPERATE] + 3 * [DEFECT] # HERE COMES THE GRUDGE move", "out while len(moves) > 
0: move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop()) #", "using an soft grudge strategy ''' from DallasPlayers.player import COOPERATE, Player, DEFECT import", "= DEFECT self.grudge = True self.moves = 2 * [COOPERATE] + 3 *", "= self.player.play(x * [COOPERATE], x * [COOPERATE], x * [COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge,", "+ 3 * [DEFECT] else: # still have a grudge move = self.moves.pop()", "while len(moves) > 0: move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop()) # grudge", "move = self.moves.pop() if len(self.moves) == 0: # can move on now, no", "more grudge self.grudge = False return move class TestPlayer(unittest.TestCase): def setUp(self): self.player =", "[] move = COOPERATE else: if not self.grudge: # lets work together move", "moves.pop()) # grudge should be gone self.assertEqual(self.player.grudge, False) # now back to life", "an soft grudge strategy ''' from DallasPlayers.player import COOPERATE, Player, DEFECT import unittest", "self.grudge = True self.moves = 2 * [COOPERATE] + 3 * [DEFECT] else:", "CS686 @date: 2016-02-13 @note: contains a player using an soft grudge strategy '''", "@class: CS686 @date: 2016-02-13 @note: contains a player using an soft grudge strategy", "along move = self.player.play(x * [COOPERATE], x * [COOPERATE], x * [COOPERATE]) self.assertEqual(move,", "self.assertEqual(self.player.grudge, False) # now back to life being good for x in range(0,", "GRUDGE move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True) # grudge it", "d,d,d,d,c,c. 
\"\"\" def __init__(self): self.grudge = False self.moves = [] def studentID(self): return", "else: # still have a grudge move = self.moves.pop() if len(self.moves) == 0:", "# can move on now, no more grudge self.grudge = False return move", "[COOPERATE] + 3 * [DEFECT] # HERE COMES THE GRUDGE move = self.player.play([COOPERATE],", "len(self.moves) == 0: # can move on now, no more grudge self.grudge =", "setUp(self): self.player = SoftGrudgerPlayer() def testPlay(self): for x in range(0, 5): # no", "self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True) # grudge it out while len(moves) > 0: move", "DallasPlayers.player import COOPERATE, Player, DEFECT import unittest class SoftGrudgerPlayer(Player): \"\"\" Soft Grudger Player", "DEFECT self.grudge = True self.moves = 2 * [COOPERATE] + 3 * [DEFECT]", "it out while len(moves) > 0: move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, moves.pop())", "play(self, myHistory, oppHistory1, oppHistory2): # are we cooperating if self.first_move(oppHistory1, oppHistory2): self.grudge =", "def agentName(self): return \"Soft Grudge Player\" def play(self, myHistory, oppHistory1, oppHistory2): # are", "3 * [DEFECT] else: # still have a grudge move = self.moves.pop() if", "a player using an soft grudge strategy ''' from DallasPlayers.player import COOPERATE, Player,", "unittest class SoftGrudgerPlayer(Player): \"\"\" Soft Grudger Player - Co-operates until the opponent defects,", "can move on now, no more grudge self.grudge = False return move class", "self.player = SoftGrudgerPlayer() def testPlay(self): for x in range(0, 5): # no grudge", "COOPERATE, Player, DEFECT import unittest class SoftGrudgerPlayer(Player): \"\"\" Soft Grudger Player - Co-operates", "COOPERATE) self.assertEqual(self.player.grudge, False) # now test the grudge moves = 2 * [COOPERATE]", "# now test the grudge moves = 2 * [COOPERATE] + 3 *", "grudge moves = 2 * [COOPERATE] + 3 * [DEFECT] # HERE COMES", "20652186 
@class: CS686 @date: 2016-02-13 @note: contains a player using an soft grudge", "THE GRUDGE move = self.player.play([COOPERATE], [DEFECT], [COOPERATE]) self.assertEqual(move, DEFECT) self.assertEqual(self.player.grudge, True) # grudge", "test the grudge moves = 2 * [COOPERATE] + 3 * [DEFECT] #", "Soft Grudger Player - Co-operates until the opponent defects, in such case opponent", "grudge self.grudge = False return move class TestPlayer(unittest.TestCase): def setUp(self): self.player = SoftGrudgerPlayer()", "5): # no grudge everyone gets along move = self.player.play(x * [COOPERATE], x", "= self.moves.pop() if len(self.moves) == 0: # can move on now, no more", "return \"20652186\" def agentName(self): return \"Soft Grudge Player\" def play(self, myHistory, oppHistory1, oppHistory2):", "studentID(self): return \"20652186\" def agentName(self): return \"Soft Grudge Player\" def play(self, myHistory, oppHistory1,", "2 * [COOPERATE] + 3 * [DEFECT] else: # still have a grudge", "@note: contains a player using an soft grudge strategy ''' from DallasPlayers.player import", "be gone self.assertEqual(self.player.grudge, False) # now back to life being good for x", "def __init__(self): self.grudge = False self.moves = [] def studentID(self): return \"20652186\" def", "x * [COOPERATE]) self.assertEqual(move, COOPERATE) self.assertEqual(self.player.grudge, False) # now test the grudge moves", "still have a grudge move = self.moves.pop() if len(self.moves) == 0: # can", "DEFECT or oppHistory2[-1] == DEFECT: # someone has betrayed us, now have a", "a grudge move = self.moves.pop() if len(self.moves) == 0: # can move on", "punished with d,d,d,d,c,c. \"\"\" def __init__(self): self.grudge = False self.moves = [] def" ]
[ "import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ] operations", "null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'), ), migrations.AlterField( model_name='instrumentalcollection', name='instrumentals', field=models.ManyToManyField(blank=True, related_name='_beats_instrumentalcollection_instrumentals_+', to='beats.Instrumental'), ), ]", "2021-08-10 02:38 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies =", "# Generated by Django 3.2.3 on 2021-08-10 02:38 from django.db import migrations, models", "model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'), ), migrations.AlterField( model_name='instrumentalcollection', name='instrumentals', field=models.ManyToManyField(blank=True, related_name='_beats_instrumentalcollection_instrumentals_+',", "django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ] operations =", "by Django 3.2.3 on 2021-08-10 02:38 from django.db import migrations, models import django.db.models.deletion", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('artists',", "[ ('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ] operations = [ migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True,", "3.2.3 on 2021-08-10 02:38 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): 
dependencies = [ ('artists', '0002_artist_bio'),", "('beats', '0002_instrumental_img_file'), ] operations = [ migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',", "name='producer', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'), ), migrations.AlterField( model_name='instrumentalcollection', name='instrumentals', field=models.ManyToManyField(blank=True, related_name='_beats_instrumentalcollection_instrumentals_+', to='beats.Instrumental'),", "models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ]", "('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ] operations = [ migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True,", "dependencies = [ ('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ] operations = [ migrations.AlterField( model_name='instrumental',", "'0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ] operations = [ migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING,", "migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'), ), migrations.AlterField( model_name='instrumentalcollection', name='instrumentals', field=models.ManyToManyField(blank=True,", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('artists', '0002_artist_bio'), ('beats',", "migrations, models import django.db.models.deletion class 
Migration(migrations.Migration): dependencies = [ ('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'),", "'0002_instrumental_img_file'), ] operations = [ migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'),", "02:38 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [", "on 2021-08-10 02:38 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies", "Django 3.2.3 on 2021-08-10 02:38 from django.db import migrations, models import django.db.models.deletion class", "= [ ('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ] operations = [ migrations.AlterField( model_name='instrumental', name='producer',", "= [ migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'), ), migrations.AlterField( model_name='instrumentalcollection',", "Generated by Django 3.2.3 on 2021-08-10 02:38 from django.db import migrations, models import", "Migration(migrations.Migration): dependencies = [ ('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ] operations = [ migrations.AlterField(", "operations = [ migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'), ), migrations.AlterField(", "class Migration(migrations.Migration): dependencies = [ ('artists', '0002_artist_bio'), ('beats', '0002_instrumental_img_file'), ] operations = [", "[ migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True, 
on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'), ), migrations.AlterField( model_name='instrumentalcollection', name='instrumentals',", "] operations = [ migrations.AlterField( model_name='instrumental', name='producer', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'), ),", "field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='artists.artist'), ), migrations.AlterField( model_name='instrumentalcollection', name='instrumentals', field=models.ManyToManyField(blank=True, related_name='_beats_instrumentalcollection_instrumentals_+', to='beats.Instrumental'), )," ]
[ "return objs[0] elif len(objs) > 1: logging.error(f'{filt} object matched multiple objects! This could", "sdostring): ''' Allows us to fix our dictionary every time we create STIX", "infra -> software (if any link is missing we need to create that", "= None l = [] if not obj_type is None: if direction ==", "= stixdict.copy() extensions = {} for key, value in newList.items(): if key.startswith('x_'): addString", "key + '_inl' logging.debug(f'type of str: {type(addString)}') extensions[addString] = value stixdict.pop(key) stixdict['extensions'] =", "get_rels: {direction}') return stix_loader.ms_source.query(fs) def get_connected_objs(stix_loader, obj, direction='in', obj_type=None): f = None l", "ret_objs.extend([infra, rel]) software = get_related_single(ip, multi_filt(type='software', x_port=port, x_protocol=protocol), stix_loader) if software is None:", "= Filter('source_ref', 'contains', obj_type) elif direction == 'out': f = Filter('target_ref', 'contains', obj_type)", "infra is None: infra = Infrastructure(name=ip.value) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=ip) ret_objs.extend([infra, rel])", "def get_infrastructure_by_ip(stix_loader, ip): ip_obj = stix_loader.ms_source.query(query=Filter('value', '=', ip))[0] if type(ip_obj) == list and", "behavior!') return objs[0] def get_related_multi(obj, filt, stix_loader): objs = stix_loader.ms.related_to(obj, filters=filt) if len(objs)", "return None elif ip_obj is None: return None else: return get_connected_obj(stix_loader, ip_obj, direction='in',", "multi_filt(op='=', **kwargs): fs = FilterSet() for key in kwargs: if key == 'op':", "!= 1: return None elif ip_obj is None: return None else: return get_connected_obj(stix_loader,", "IPv4Address, Infrastructure, Relationship, CustomExtension, properties import logging import re def gen_uuid(string): return f'{string}--{uuid4()}'", "in stixdict.keys(): stixdict['spec_version'] = '2.1' s = SDOType(**stixdict) return s #Get infra 
connected", "SDOType(**stixdict) return s #Get infra connected to ip: # - get ip by", "attr_string): if hasattr(obj, attr_string): return getattr(obj, attr_string) elif hasattr(obj, 'extensions'): if attr_string in", "Energy Alliance, LLC from uuid import uuid4 from stix2.datastore import Filter, FilterSet from", "+'_inl' def fix_stix(SDOType, stixdict, sdostring): ''' Allows us to fix our dictionary every", "attr_string): return getattr(obj, attr_string) elif hasattr(obj, 'extensions'): if attr_string in obj.extensions: return obj.extensions[attr_string]", "fs.add(Filter(key, op , kwargs[key])) return fs #TODO HELPER FUNCTION (param = class (software,", "missing we need to create that link) # self.ms_source = self.ms.source # self.ms_sink", "to fix our dictionary every time we create STIX Objects to have all", "infra = get_related_single(ip, multi_filt(type='infrastructure'), stix_loader) print(f'get_related_single_infra: {infra}') if infra is None: infra =", "CustomExtension, properties import logging import re def gen_uuid(string): return f'{string}--{uuid4()}' def get_rels(stix_loader, obj,", "get_rels(stix_loader, obj, direction='in', filters=None): fs = FilterSet() if not filters is None: if", "- get connected by type, port, protocol, service def get_objects(filt, stix_loader): objs =", "relationship_type='has', target_ref=software) # stix_loader.merge([software, rel]) ret_objs.extend([software, rel]) # ret_objs.extend([software, rel]) return (software, ret_objs)", "value # - get connected by type (infra) # - get connected by", "connected by type, port, protocol, service def get_objects(filt, stix_loader): objs = stix_loader.ms_source.query(filt) if", "type (infra) # - get connected by type, port, protocol, service def get_objects(filt,", "{infra}') if infra is None: infra = Infrastructure(name=ip.value) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=ip)", "multiple objects! 
This could cause unexpected behavior!') return objs[0] def get_object_or_create(ip_addr, port, protocol,", "multiple objects! This could cause unexpected behavior!') return objs[0] def get_related_multi(obj, filt, stix_loader):", "rel]) software = get_related_single(ip, multi_filt(type='software', x_port=port, x_protocol=protocol), stix_loader) if software is None: software", "logging.debug(f'type of str: {type(addString)}') extensions[addString] = value stixdict.pop(key) stixdict['extensions'] = extensions # print(stixdict)", "1: return objs[0] elif len(objs) > 1: logging.error(f'{filt} object matched multiple objects! This", "re def gen_uuid(string): return f'{string}--{uuid4()}' def get_rels(stix_loader, obj, direction='in', filters=None): fs = FilterSet()", "def get_rels(stix_loader, obj, direction='in', filters=None): fs = FilterSet() if not filters is None:", "self.ms_source = self.ms.source # self.ms_sink = self.ms.sink # stix_loader.ms_source ret_objs = [] ip", "fs.add(f) else: fs.add(filters) if direction == 'in': f = Filter('target_ref', '=', obj.id) fs.add()", "get_objects(filt, stix_loader) if objs is None: return None elif len(objs) == 1: return", "objs = stix_loader.ms.related_to(obj, filters=filt) if len(objs) > 0: return objs else: return None", "get_connected_objs(stix_loader, obj, direction='in', obj_type=None): f = None l = [] if not obj_type", "filters=filt) if len(objs) > 0: return objs else: return None def get_related_single(obj, filt,", "Alliance, LLC from uuid import uuid4 from stix2.datastore import Filter, FilterSet from stix2", "get_connected_obj(stix_loader, ip_obj, direction='in', obj_type='infrastructure') def multi_filt(op='=', **kwargs): fs = FilterSet() for key in", "'contains', obj_type) elif direction == 'out': f = Filter('target_ref', 'contains', obj_type) rels =", "is None: if direction == 'in': f = Filter('source_ref', 'contains', obj_type) elif direction", "is None: return None else: return get_connected_obj(stix_loader, 
ip_obj, direction='in', obj_type='infrastructure') def multi_filt(op='=', **kwargs):", "value=ip_addr), stix_loader) if ip is None: ip = IPv4Address(value=ip_addr) ret_objs.append(ip) infra = get_related_single(ip,", "else: fs.add(filters) if direction == 'in': f = Filter('target_ref', '=', obj.id) fs.add() elif", "rel = Relationship(source_ref=infra, relationship_type='has', target_ref=ip) ret_objs.extend([infra, rel]) software = get_related_single(ip, multi_filt(type='software', x_port=port, x_protocol=protocol),", "This could cause unexpected behavior!') return objs[0] def get_object_or_create(ip_addr, port, protocol, service, stix_loader):", "obj, direction='in', obj_type=None): objs = get_connected_objs(stix_loader, obj, direction=direction, obj_type=obj_type) if len(objs) < 1:", "stix2.datastore import Filter, FilterSet from stix2 import Software, Process, IPv4Address, Infrastructure, Relationship, CustomExtension,", "# - get ip by value # - get connected by type (infra)", "ip = IPv4Address(value=ip_addr) ret_objs.append(ip) infra = get_related_single(ip, multi_filt(type='infrastructure'), stix_loader) print(f'get_related_single_infra: {infra}') if infra", "= IPv4Address(value=ip_addr) ret_objs.append(ip) infra = get_related_single(ip, multi_filt(type='infrastructure'), stix_loader) print(f'get_related_single_infra: {infra}') if infra is", "direction == 'out': f = Filter('target_ref', 'contains', obj_type) rels = get_rels(stix_loader, obj, direction=direction,", "port, protocol, service, stix_loader): #need to go from ip -> infra -> software", "value stixdict.pop(key) stixdict['extensions'] = extensions # print(stixdict) # id = '' # print('our", "key == 'op': continue fs.add(Filter(key, op , kwargs[key])) return fs #TODO HELPER FUNCTION", "f\"{port}/{protocol}\"} Server', x_port=port, x_protocol=protocol, x_service=service, allow_custom=True, id=gen_uuid('software')) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=software) #", 
"Filter, FilterSet from stix2 import Software, Process, IPv4Address, Infrastructure, Relationship, CustomExtension, properties import", "FUNCTION (param = class (software, process,etc), dictionary) #TODO: returns created object (extensions:()) #TODO:", "objs else: return None def get_object(filt, stix_loader): objs = get_objects(filt, stix_loader) if objs", "filt, stix_loader): objs = stix_loader.ms.related_to(obj, filters=filt) if len(objs) > 0: return objs else:", "is None: software = Software(name=f'{service if service else f\"{port}/{protocol}\"} Server', x_port=port, x_protocol=protocol, x_service=service,", "> 0 : return objs else: return None def get_object(filt, stix_loader): objs =", "'=', ip))[0] if type(ip_obj) == list and len(ip_obj) != 1: return None elif", "stixdict.keys(): stixdict['spec_version'] = '2.1' s = SDOType(**stixdict) return s #Get infra connected to", "stixdict['extensions'] = extensions # print(stixdict) # id = '' # print('our type: ',", "elif ip_obj is None: return None else: return get_connected_obj(stix_loader, ip_obj, direction='in', obj_type='infrastructure') def", "[] if not obj_type is None: if direction == 'in': f = Filter('source_ref',", "= stix_loader.ms_source.query(query=Filter('value', '=', ip))[0] if type(ip_obj) == list and len(ip_obj) != 1: return", "fs.add(filters) if direction == 'in': f = Filter('target_ref', '=', obj.id) fs.add() elif direction", "objects! 
This could cause unexpected behavior!') return objs[0] def get_object_or_create(ip_addr, port, protocol, service,", "# print('SDO TYPE NOT INF/Software/PROCESS') # print(id) if 'id' not in stixdict.keys(): stixdict['id']", "ip))[0] if type(ip_obj) == list and len(ip_obj) != 1: return None elif ip_obj", "in stixdict.keys(): stixdict['id'] = gen_uuid(sdostring) if 'allow_custom' not in stixdict.keys(): stixdict['allow_custom'] = True", "#need to go from ip -> infra -> software (if any link is", "if type(ip_obj) == list and len(ip_obj) != 1: return None elif ip_obj is", "return None elif len(objs) == 1: return objs[0] elif len(objs) > 1: logging.error(f'{filt}", "direction == 'out': f = Filter('source_ref', '=', obj.id) fs.add() else: logging.error(f'Unexpected direction passed", "return None def get_related_single(obj, filt, stix_loader): objs = get_related_multi(obj, filt, stix_loader) if objs", "object matched multiple objects! This could cause unexpected behavior!') return objs[0] def get_related_multi(obj,", "get_connected_objs(stix_loader, obj, direction=direction, obj_type=obj_type) if len(objs) < 1: return None else: return objs[0]", "LLC from uuid import uuid4 from stix2.datastore import Filter, FilterSet from stix2 import", "= get_related_single(ip, multi_filt(type='infrastructure'), stix_loader) print(f'get_related_single_infra: {infra}') if infra is None: infra = Infrastructure(name=ip.value)", "= Infrastructure(name=ip.value) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=ip) ret_objs.extend([infra, rel]) software = get_related_single(ip, multi_filt(type='software',", "def multi_filt(op='=', **kwargs): fs = FilterSet() for key in kwargs: if key ==", "ip_obj = stix_loader.ms_source.query(query=Filter('value', '=', ip))[0] if type(ip_obj) == list and len(ip_obj) != 1:", "Battelle Energy Alliance, LLC from uuid import uuid4 from stix2.datastore import Filter, FilterSet", "#TODO: dict.keys(startswith(x_)) key.add +'_inl' def 
fix_stix(SDOType, stixdict, sdostring): ''' Allows us to fix", "# if sdostring == 'Software': # id = gen_uuid('software') # elif sdostring ==", "if objs is None: return None elif len(objs) == 1: return objs[0] elif", "f = Filter('source_ref', 'contains', obj_type) elif direction == 'out': f = Filter('target_ref', 'contains',", "'op': continue fs.add(Filter(key, op , kwargs[key])) return fs #TODO HELPER FUNCTION (param =", "# elif sdostring == 'Infrastructure': # id = gen_uuid('infrastructure') # elif SDOType ==", "rel]) ret_objs.extend([software, rel]) # ret_objs.extend([software, rel]) return (software, ret_objs) def get_stix_attr(obj, attr_string): if", "in an extensions list ''' newList = stixdict.copy() extensions = {} for key,", "= Filter('source_ref', '=', obj.id) fs.add() else: logging.error(f'Unexpected direction passed to get_rels: {direction}') return", "link) # self.ms_source = self.ms.source # self.ms_sink = self.ms.sink # stix_loader.ms_source ret_objs =", "'=', obj.id) fs.add() elif direction == 'out': f = Filter('source_ref', '=', obj.id) fs.add()", "not in stixdict.keys(): stixdict['id'] = gen_uuid(sdostring) if 'allow_custom' not in stixdict.keys(): stixdict['allow_custom'] =", "list and len(ip_obj) != 1: return None elif ip_obj is None: return None", "to go from ip -> infra -> software (if any link is missing", "stix_loader): #need to go from ip -> infra -> software (if any link", "'out': f = Filter('target_ref', 'contains', obj_type) rels = get_rels(stix_loader, obj, direction=direction, filters=f) for", "get ip by value # - get connected by type (infra) # -", "if service else f\"{port}/{protocol}\"} Server', x_port=port, x_protocol=protocol, x_service=service, allow_custom=True, id=gen_uuid('software')) rel = Relationship(source_ref=infra,", "'in': l.append(stix_loader.ms_source.get(rel.source_ref)) elif direction == 'out': l.append(stix_loader.ms_source.get(rel.target_ref)) return l def get_connected_obj(stix_loader, obj, direction='in',", 
"return l def get_connected_obj(stix_loader, obj, direction='in', obj_type=None): objs = get_connected_objs(stix_loader, obj, direction=direction, obj_type=obj_type)", "'allow_custom' not in stixdict.keys(): stixdict['allow_custom'] = True if 'spec_version' not in stixdict.keys(): stixdict['spec_version']", "get_object(multi_filt(type='ipv4-addr', value=ip_addr), stix_loader) if ip is None: ip = IPv4Address(value=ip_addr) ret_objs.append(ip) infra =", "s = SDOType(**stixdict) return s #Get infra connected to ip: # - get", "Allows us to fix our dictionary every time we create STIX Objects to", "rel = Relationship(source_ref=infra, relationship_type='has', target_ref=software) # stix_loader.merge([software, rel]) ret_objs.extend([software, rel]) # ret_objs.extend([software, rel])", "FilterSet() if not filters is None: if type(filters) == list: for f in", "l.append(stix_loader.ms_source.get(rel.target_ref)) return l def get_connected_obj(stix_loader, obj, direction='in', obj_type=None): objs = get_connected_objs(stix_loader, obj, direction=direction,", "None l = [] if not obj_type is None: if direction == 'in':", "# print(stixdict) # id = '' # print('our type: ', type(SDOType)) # if", "[] ip = get_object(multi_filt(type='ipv4-addr', value=ip_addr), stix_loader) if ip is None: ip = IPv4Address(value=ip_addr)", "fs #TODO HELPER FUNCTION (param = class (software, process,etc), dictionary) #TODO: returns created", "= gen_uuid('infrastructure') # elif SDOType == 'Process': # id = gen_uuid('process') # else:", "in newList.items(): if key.startswith('x_'): addString = key + '_inl' logging.debug(f'type of str: {type(addString)}')", "'out': l.append(stix_loader.ms_source.get(rel.target_ref)) return l def get_connected_obj(stix_loader, obj, direction='in', obj_type=None): objs = get_connected_objs(stix_loader, obj,", "else: # print('SDO TYPE NOT INF/Software/PROCESS') # print(id) if 'id' not in stixdict.keys():", "direction=direction, obj_type=obj_type) if len(objs) < 1: return 
None else: return objs[0] def get_infrastructure_by_ip(stix_loader,", "direction='in', filters=None): fs = FilterSet() if not filters is None: if type(filters) ==", "# stix_loader.merge([software, rel]) ret_objs.extend([software, rel]) # ret_objs.extend([software, rel]) return (software, ret_objs) def get_stix_attr(obj,", "if not obj_type is None: if direction == 'in': f = Filter('source_ref', 'contains',", "True if 'spec_version' not in stixdict.keys(): stixdict['spec_version'] = '2.1' s = SDOType(**stixdict) return", "return s #Get infra connected to ip: # - get ip by value", "matched multiple objects! This could cause unexpected behavior!') return objs[0] def get_related_multi(obj, filt,", "Relationship, CustomExtension, properties import logging import re def gen_uuid(string): return f'{string}--{uuid4()}' def get_rels(stix_loader,", "len(objs) == 1: return objs[0] elif len(objs) > 1: logging.error(f'{filt} object matched multiple", "# id = '' # print('our type: ', type(SDOType)) # if sdostring ==", "# ret_objs.extend([software, rel]) return (software, ret_objs) def get_stix_attr(obj, attr_string): if hasattr(obj, attr_string): return", "if 'allow_custom' not in stixdict.keys(): stixdict['allow_custom'] = True if 'spec_version' not in stixdict.keys():", "direction='in', obj_type=None): f = None l = [] if not obj_type is None:", "l = [] if not obj_type is None: if direction == 'in': f", "allow_custom=True, id=gen_uuid('software')) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=software) # stix_loader.merge([software, rel]) ret_objs.extend([software, rel]) #", "objs[0] elif len(objs) > 1: logging.error(f'{filt} object matched multiple objects! 
This could cause", "'Software': # id = gen_uuid('software') # elif sdostring == 'Infrastructure': # id =", "fix our dictionary every time we create STIX Objects to have all custom", "id = gen_uuid('infrastructure') # elif SDOType == 'Process': # id = gen_uuid('process') #", "fs = FilterSet() if not filters is None: if type(filters) == list: for", "dict.keys(startswith(x_)) key.add +'_inl' def fix_stix(SDOType, stixdict, sdostring): ''' Allows us to fix our", "== 'in': f = Filter('source_ref', 'contains', obj_type) elif direction == 'out': f =", "# - get connected by type, port, protocol, service def get_objects(filt, stix_loader): objs", "id=gen_uuid('software')) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=software) # stix_loader.merge([software, rel]) ret_objs.extend([software, rel]) # ret_objs.extend([software,", "direction='in', obj_type=None): objs = get_connected_objs(stix_loader, obj, direction=direction, obj_type=obj_type) if len(objs) < 1: return", "len(objs) > 1: logging.error(f'{filt} object matched multiple objects! 
This could cause unexpected behavior!')", "go from ip -> infra -> software (if any link is missing we", "get_related_single(obj, filt, stix_loader): objs = get_related_multi(obj, filt, stix_loader) if objs is None: return", "direction == 'in': f = Filter('source_ref', 'contains', obj_type) elif direction == 'out': f", "ip_obj, direction='in', obj_type='infrastructure') def multi_filt(op='=', **kwargs): fs = FilterSet() for key in kwargs:", "if key.startswith('x_'): addString = key + '_inl' logging.debug(f'type of str: {type(addString)}') extensions[addString] =", "objs[0] def get_related_multi(obj, filt, stix_loader): objs = stix_loader.ms.related_to(obj, filters=filt) if len(objs) > 0:", "stix_loader.ms_source.query(query=Filter('value', '=', ip))[0] if type(ip_obj) == list and len(ip_obj) != 1: return None", "if not filters is None: if type(filters) == list: for f in filters:", "for rel in rels: if direction == 'in': l.append(stix_loader.ms_source.get(rel.source_ref)) elif direction == 'out':", "FilterSet from stix2 import Software, Process, IPv4Address, Infrastructure, Relationship, CustomExtension, properties import logging", "if direction == 'in': f = Filter('target_ref', '=', obj.id) fs.add() elif direction ==", "= get_related_single(ip, multi_filt(type='software', x_port=port, x_protocol=protocol), stix_loader) if software is None: software = Software(name=f'{service", "'Infrastructure': # id = gen_uuid('infrastructure') # elif SDOType == 'Process': # id =", "to ip: # - get ip by value # - get connected by", "filters is None: if type(filters) == list: for f in filters: fs.add(f) else:", "Filter('target_ref', 'contains', obj_type) rels = get_rels(stix_loader, obj, direction=direction, filters=f) for rel in rels:", "extensions[addString] = value stixdict.pop(key) stixdict['extensions'] = extensions # print(stixdict) # id = ''", "if sdostring == 'Software': # id = gen_uuid('software') # elif sdostring == 'Infrastructure':", "multi_filt(type='infrastructure'), 
stix_loader) print(f'get_related_single_infra: {infra}') if infra is None: infra = Infrastructure(name=ip.value) rel =", "service, stix_loader): #need to go from ip -> infra -> software (if any", "for f in filters: fs.add(f) else: fs.add(filters) if direction == 'in': f =", "def get_object(filt, stix_loader): objs = get_objects(filt, stix_loader) if objs is None: return None", "FilterSet() for key in kwargs: if key == 'op': continue fs.add(Filter(key, op ,", "get_related_multi(obj, filt, stix_loader): objs = stix_loader.ms.related_to(obj, filters=filt) if len(objs) > 0: return objs", "all custom properties in an extensions list ''' newList = stixdict.copy() extensions =", "return (software, ret_objs) def get_stix_attr(obj, attr_string): if hasattr(obj, attr_string): return getattr(obj, attr_string) elif", "self.ms_sink = self.ms.sink # stix_loader.ms_source ret_objs = [] ip = get_object(multi_filt(type='ipv4-addr', value=ip_addr), stix_loader)", "', type(SDOType)) # if sdostring == 'Software': # id = gen_uuid('software') # elif", "+ '_inl' logging.debug(f'type of str: {type(addString)}') extensions[addString] = value stixdict.pop(key) stixdict['extensions'] = extensions", "obj.id) fs.add() elif direction == 'out': f = Filter('source_ref', '=', obj.id) fs.add() else:", "software = get_related_single(ip, multi_filt(type='software', x_port=port, x_protocol=protocol), stix_loader) if software is None: software =", "filters=f) for rel in rels: if direction == 'in': l.append(stix_loader.ms_source.get(rel.source_ref)) elif direction ==", "= value stixdict.pop(key) stixdict['extensions'] = extensions # print(stixdict) # id = '' #", "HELPER FUNCTION (param = class (software, process,etc), dictionary) #TODO: returns created object (extensions:())", "= self.ms.sink # stix_loader.ms_source ret_objs = [] ip = get_object(multi_filt(type='ipv4-addr', value=ip_addr), stix_loader) if", "getattr(obj, attr_string) elif hasattr(obj, 'extensions'): if attr_string in obj.extensions: 
return obj.extensions[attr_string] return None", "# else: # print('SDO TYPE NOT INF/Software/PROCESS') # print(id) if 'id' not in", "filt, stix_loader): objs = get_related_multi(obj, filt, stix_loader) if objs is None: return None", "ret_objs) def get_stix_attr(obj, attr_string): if hasattr(obj, attr_string): return getattr(obj, attr_string) elif hasattr(obj, 'extensions'):", "== 'in': f = Filter('target_ref', '=', obj.id) fs.add() elif direction == 'out': f", "{direction}') return stix_loader.ms_source.query(fs) def get_connected_objs(stix_loader, obj, direction='in', obj_type=None): f = None l =", "None: if direction == 'in': f = Filter('source_ref', 'contains', obj_type) elif direction ==", "get_connected_obj(stix_loader, obj, direction='in', obj_type=None): objs = get_connected_objs(stix_loader, obj, direction=direction, obj_type=obj_type) if len(objs) <", "# id = gen_uuid('process') # else: # print('SDO TYPE NOT INF/Software/PROCESS') # print(id)", "behavior!') return objs[0] def get_object_or_create(ip_addr, port, protocol, service, stix_loader): #need to go from", "get_stix_attr(obj, attr_string): if hasattr(obj, attr_string): return getattr(obj, attr_string) elif hasattr(obj, 'extensions'): if attr_string", "ret_objs.extend([software, rel]) # ret_objs.extend([software, rel]) return (software, ret_objs) def get_stix_attr(obj, attr_string): if hasattr(obj,", "< 1: return None else: return objs[0] def get_infrastructure_by_ip(stix_loader, ip): ip_obj = stix_loader.ms_source.query(query=Filter('value',", "elif len(objs) == 1: return objs[0] elif len(objs) > 1: logging.error(f'{filt} object matched", "== 1: return objs[0] elif len(objs) > 1: logging.error(f'{filt} object matched multiple objects!", "'' # print('our type: ', type(SDOType)) # if sdostring == 'Software': # id", "''' newList = stixdict.copy() extensions = {} for key, value in newList.items(): if", "len(objs) > 0: return objs else: return None def get_related_single(obj, filt, stix_loader): objs", 
def fix_stix(SDOType, stixdict, sdostring):
    """Normalize a raw property dictionary before instantiating a STIX object.

    Moves every custom ``x_*`` key into an ``extensions`` mapping (renamed with
    an ``_inl`` suffix), then fills in ``id``, ``allow_custom`` and
    ``spec_version`` when the caller did not supply them.

    :param SDOType: STIX class to instantiate (e.g. Software, Infrastructure).
    :param stixdict: property dictionary; mutated in place.
    :param sdostring: lowercase STIX type name used to generate an id.
    :return: instance of ``SDOType`` built from the normalized dictionary.
    """
    # Iterate over a snapshot so we can safely pop keys from stixdict.
    extensions = {}
    for key, value in list(stixdict.items()):
        if key.startswith('x_'):
            extensions[key + '_inl'] = value
            stixdict.pop(key)
    # BUG FIX: only attach extensions when there is something to attach.
    # The original unconditionally assigned an (often empty) dict, which
    # stix2 rejects (DictionaryProperty must be non-empty) and which would
    # also clobber a caller-supplied 'extensions' entry.
    if extensions:
        stixdict['extensions'] = extensions
    if 'id' not in stixdict:
        stixdict['id'] = gen_uuid(sdostring)
    stixdict.setdefault('allow_custom', True)
    stixdict.setdefault('spec_version', '2.1')
    return SDOType(**stixdict)
get_connected_obj(stix_loader, ip_obj,", "return stix_loader.ms_source.query(fs) def get_connected_objs(stix_loader, obj, direction='in', obj_type=None): f = None l = []", "key.startswith('x_'): addString = key + '_inl' logging.debug(f'type of str: {type(addString)}') extensions[addString] = value", "# print('our type: ', type(SDOType)) # if sdostring == 'Software': # id =", "ret_objs.append(ip) infra = get_related_single(ip, multi_filt(type='infrastructure'), stix_loader) print(f'get_related_single_infra: {infra}') if infra is None: infra", "cause unexpected behavior!') return objs[0] def get_object_or_create(ip_addr, port, protocol, service, stix_loader): #need to", "print(id) if 'id' not in stixdict.keys(): stixdict['id'] = gen_uuid(sdostring) if 'allow_custom' not in", "stixdict.pop(key) stixdict['extensions'] = extensions # print(stixdict) # id = '' # print('our type:", "print(f'get_related_single_infra: {infra}') if infra is None: infra = Infrastructure(name=ip.value) rel = Relationship(source_ref=infra, relationship_type='has',", "get connected by type, port, protocol, service def get_objects(filt, stix_loader): objs = stix_loader.ms_source.query(filt)", "from uuid import uuid4 from stix2.datastore import Filter, FilterSet from stix2 import Software,", "uuid import uuid4 from stix2.datastore import Filter, FilterSet from stix2 import Software, Process,", "us to fix our dictionary every time we create STIX Objects to have", "== 'Infrastructure': # id = gen_uuid('infrastructure') # elif SDOType == 'Process': # id", "stixdict.copy() extensions = {} for key, value in newList.items(): if key.startswith('x_'): addString =", "stixdict['id'] = gen_uuid(sdostring) if 'allow_custom' not in stixdict.keys(): stixdict['allow_custom'] = True if 'spec_version'", "None: if type(filters) == list: for f in filters: fs.add(f) else: fs.add(filters) if", "kwargs: if key == 'op': continue fs.add(Filter(key, op , kwargs[key])) return fs #TODO", "Software(name=f'{service if service 
else f\"{port}/{protocol}\"} Server', x_port=port, x_protocol=protocol, x_service=service, allow_custom=True, id=gen_uuid('software')) rel =", "ip is None: ip = IPv4Address(value=ip_addr) ret_objs.append(ip) infra = get_related_single(ip, multi_filt(type='infrastructure'), stix_loader) print(f'get_related_single_infra:", "= class (software, process,etc), dictionary) #TODO: returns created object (extensions:()) #TODO: dict.keys(startswith(x_)) key.add", "have all custom properties in an extensions list ''' newList = stixdict.copy() extensions", "logging.error(f'{filt} object matched multiple objects! This could cause unexpected behavior!') return objs[0] def", "objs = get_related_multi(obj, filt, stix_loader) if objs is None: return None elif len(objs)", "'2.1' s = SDOType(**stixdict) return s #Get infra connected to ip: # -", "every time we create STIX Objects to have all custom properties in an", "if direction == 'in': f = Filter('source_ref', 'contains', obj_type) elif direction == 'out':", "import Software, Process, IPv4Address, Infrastructure, Relationship, CustomExtension, properties import logging import re def", "elif direction == 'out': f = Filter('target_ref', 'contains', obj_type) rels = get_rels(stix_loader, obj,", "in stixdict.keys(): stixdict['allow_custom'] = True if 'spec_version' not in stixdict.keys(): stixdict['spec_version'] = '2.1'", "objects! 
This could cause unexpected behavior!') return objs[0] def get_related_multi(obj, filt, stix_loader): objs", "key.add +'_inl' def fix_stix(SDOType, stixdict, sdostring): ''' Allows us to fix our dictionary", ": return objs else: return None def get_object(filt, stix_loader): objs = get_objects(filt, stix_loader)", "extensions = {} for key, value in newList.items(): if key.startswith('x_'): addString = key", "rel]) # ret_objs.extend([software, rel]) return (software, ret_objs) def get_stix_attr(obj, attr_string): if hasattr(obj, attr_string):", "f = None l = [] if not obj_type is None: if direction", ", kwargs[key])) return fs #TODO HELPER FUNCTION (param = class (software, process,etc), dictionary)", "create STIX Objects to have all custom properties in an extensions list '''", "by type (infra) # - get connected by type, port, protocol, service def", "l.append(stix_loader.ms_source.get(rel.source_ref)) elif direction == 'out': l.append(stix_loader.ms_source.get(rel.target_ref)) return l def get_connected_obj(stix_loader, obj, direction='in', obj_type=None):", "= '' # print('our type: ', type(SDOType)) # if sdostring == 'Software': #", "fs = FilterSet() for key in kwargs: if key == 'op': continue fs.add(Filter(key,", "-> infra -> software (if any link is missing we need to create", "print('SDO TYPE NOT INF/Software/PROCESS') # print(id) if 'id' not in stixdict.keys(): stixdict['id'] =", "need to create that link) # self.ms_source = self.ms.source # self.ms_sink = self.ms.sink", "direction == 'in': f = Filter('target_ref', '=', obj.id) fs.add() elif direction == 'out':", "= self.ms.source # self.ms_sink = self.ms.sink # stix_loader.ms_source ret_objs = [] ip =", "obj_type) rels = get_rels(stix_loader, obj, direction=direction, filters=f) for rel in rels: if direction", "'Process': # id = gen_uuid('process') # else: # print('SDO TYPE NOT INF/Software/PROCESS') #", "properties import logging import re def gen_uuid(string): return f'{string}--{uuid4()}' def 
def get_rels(stix_loader, obj, direction='in', filters=None):
    """Return relationship objects touching ``obj`` from the source store.

    :param stix_loader: loader whose ``ms_source`` store is queried.
    :param obj: STIX object whose id anchors the relationship query.
    :param direction: 'in' matches relationships with target_ref == obj.id,
        'out' matches source_ref == obj.id; anything else is logged as an error.
    :param filters: extra Filter (or list of Filters) ANDed into the query.
    """
    fs = FilterSet()
    if filters is not None:
        if isinstance(filters, list):
            for flt in filters:
                fs.add(flt)
        else:
            fs.add(filters)
    # BUG FIX: the direction filter was built but never added -- fs.add()
    # was called with no argument -- so the direction constraint was ignored
    # and every relationship in the store could be returned.
    if direction == 'in':
        fs.add(Filter('target_ref', '=', obj.id))
    elif direction == 'out':
        fs.add(Filter('source_ref', '=', obj.id))
    else:
        logging.error(f'Unexpected direction passed to get_rels: {direction}')
    return stix_loader.ms_source.query(fs)
This could cause unexpected behavior!') return", "TYPE NOT INF/Software/PROCESS') # print(id) if 'id' not in stixdict.keys(): stixdict['id'] = gen_uuid(sdostring)", "ip -> infra -> software (if any link is missing we need to", "None def get_object(filt, stix_loader): objs = get_objects(filt, stix_loader) if objs is None: return", "Relationship(source_ref=infra, relationship_type='has', target_ref=software) # stix_loader.merge([software, rel]) ret_objs.extend([software, rel]) # ret_objs.extend([software, rel]) return (software,", "'in': f = Filter('target_ref', '=', obj.id) fs.add() elif direction == 'out': f =", "in filters: fs.add(f) else: fs.add(filters) if direction == 'in': f = Filter('target_ref', '=',", "type, port, protocol, service def get_objects(filt, stix_loader): objs = stix_loader.ms_source.query(filt) if len(objs) >", "type(SDOType)) # if sdostring == 'Software': # id = gen_uuid('software') # elif sdostring", "filters=None): fs = FilterSet() if not filters is None: if type(filters) == list:", "any link is missing we need to create that link) # self.ms_source =", "passed to get_rels: {direction}') return stix_loader.ms_source.query(fs) def get_connected_objs(stix_loader, obj, direction='in', obj_type=None): f =", "'_inl' logging.debug(f'type of str: {type(addString)}') extensions[addString] = value stixdict.pop(key) stixdict['extensions'] = extensions #", "stix_loader): objs = get_related_multi(obj, filt, stix_loader) if objs is None: return None elif", "to create that link) # self.ms_source = self.ms.source # self.ms_sink = self.ms.sink #", "x_protocol=protocol), stix_loader) if software is None: software = Software(name=f'{service if service else f\"{port}/{protocol}\"}", "def get_related_single(obj, filt, stix_loader): objs = get_related_multi(obj, filt, stix_loader) if objs is None:", "process,etc), dictionary) #TODO: returns created object (extensions:()) #TODO: dict.keys(startswith(x_)) key.add +'_inl' def fix_stix(SDOType,", "stix_loader): objs 
= stix_loader.ms.related_to(obj, filters=filt) if len(objs) > 0: return objs else: return", "{type(addString)}') extensions[addString] = value stixdict.pop(key) stixdict['extensions'] = extensions # print(stixdict) # id =", "#TODO: returns created object (extensions:()) #TODO: dict.keys(startswith(x_)) key.add +'_inl' def fix_stix(SDOType, stixdict, sdostring):", "service def get_objects(filt, stix_loader): objs = stix_loader.ms_source.query(filt) if len(objs) > 0 : return", "= Relationship(source_ref=infra, relationship_type='has', target_ref=software) # stix_loader.merge([software, rel]) ret_objs.extend([software, rel]) # ret_objs.extend([software, rel]) return", "<reponame>macbryc/IX-DiscoveryTools #Copyright 2021, Battelle Energy Alliance, LLC from uuid import uuid4 from stix2.datastore", "f = Filter('source_ref', '=', obj.id) fs.add() else: logging.error(f'Unexpected direction passed to get_rels: {direction}')", "== 'out': l.append(stix_loader.ms_source.get(rel.target_ref)) return l def get_connected_obj(stix_loader, obj, direction='in', obj_type=None): objs = get_connected_objs(stix_loader,", "get_infrastructure_by_ip(stix_loader, ip): ip_obj = stix_loader.ms_source.query(query=Filter('value', '=', ip))[0] if type(ip_obj) == list and len(ip_obj)", "direction passed to get_rels: {direction}') return stix_loader.ms_source.query(fs) def get_connected_objs(stix_loader, obj, direction='in', obj_type=None): f", "type(ip_obj) == list and len(ip_obj) != 1: return None elif ip_obj is None:", "Objects to have all custom properties in an extensions list ''' newList =", "elif direction == 'out': f = Filter('source_ref', '=', obj.id) fs.add() else: logging.error(f'Unexpected direction", "- get connected by type (infra) # - get connected by type, port,", "ret_objs.extend([software, rel]) return (software, ret_objs) def get_stix_attr(obj, attr_string): if hasattr(obj, attr_string): return getattr(obj,", "# stix_loader.ms_source ret_objs = [] ip = 
def get_connected_objs(stix_loader, obj, direction='in', obj_type=None):
    """Collect objects on the far end of relationships touching ``obj``.

    direction='in' follows relationships targeting ``obj`` back to their
    sources; direction='out' follows relationships from ``obj`` to their
    targets. When ``obj_type`` is given, only relationships whose far-end
    reference contains that type string are considered.
    """
    type_filter = None
    if obj_type is not None:
        if direction == 'in':
            type_filter = Filter('source_ref', 'contains', obj_type)
        elif direction == 'out':
            type_filter = Filter('target_ref', 'contains', obj_type)
    connected = []
    for rel in get_rels(stix_loader, obj, direction=direction, filters=type_filter):
        if direction == 'in':
            connected.append(stix_loader.ms_source.get(rel.source_ref))
        elif direction == 'out':
            connected.append(stix_loader.ms_source.get(rel.target_ref))
    return connected
fs.add(filters) if direction ==", "if 'spec_version' not in stixdict.keys(): stixdict['spec_version'] = '2.1' s = SDOType(**stixdict) return s", "is None: infra = Infrastructure(name=ip.value) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=ip) ret_objs.extend([infra, rel]) software", "logging import re def gen_uuid(string): return f'{string}--{uuid4()}' def get_rels(stix_loader, obj, direction='in', filters=None): fs", "not obj_type is None: if direction == 'in': f = Filter('source_ref', 'contains', obj_type)", "else: return None def get_related_single(obj, filt, stix_loader): objs = get_related_multi(obj, filt, stix_loader) if", "#Copyright 2021, Battelle Energy Alliance, LLC from uuid import uuid4 from stix2.datastore import", "stix_loader.ms_source.query(fs) def get_connected_objs(stix_loader, obj, direction='in', obj_type=None): f = None l = [] if", "stix_loader.ms_source.query(filt) if len(objs) > 0 : return objs else: return None def get_object(filt,", "objs[0] def get_infrastructure_by_ip(stix_loader, ip): ip_obj = stix_loader.ms_source.query(query=Filter('value', '=', ip))[0] if type(ip_obj) == list", "port, protocol, service def get_objects(filt, stix_loader): objs = stix_loader.ms_source.query(filt) if len(objs) > 0", "None def get_related_single(obj, filt, stix_loader): objs = get_related_multi(obj, filt, stix_loader) if objs is", "def get_connected_objs(stix_loader, obj, direction='in', obj_type=None): f = None l = [] if not", "return get_connected_obj(stix_loader, ip_obj, direction='in', obj_type='infrastructure') def multi_filt(op='=', **kwargs): fs = FilterSet() for key", "import re def gen_uuid(string): return f'{string}--{uuid4()}' def get_rels(stix_loader, obj, direction='in', filters=None): fs =", "if len(objs) < 1: return None else: return objs[0] def get_infrastructure_by_ip(stix_loader, ip): ip_obj", "gen_uuid(sdostring) if 'allow_custom' not in stixdict.keys(): stixdict['allow_custom'] = True if 'spec_version' not 
in", "logging.error(f'Unexpected direction passed to get_rels: {direction}') return stix_loader.ms_source.query(fs) def get_connected_objs(stix_loader, obj, direction='in', obj_type=None):", "return objs else: return None def get_related_single(obj, filt, stix_loader): objs = get_related_multi(obj, filt,", "if infra is None: infra = Infrastructure(name=ip.value) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=ip) ret_objs.extend([infra,", "return None def get_object(filt, stix_loader): objs = get_objects(filt, stix_loader) if objs is None:", "gen_uuid('process') # else: # print('SDO TYPE NOT INF/Software/PROCESS') # print(id) if 'id' not", "== 'Software': # id = gen_uuid('software') # elif sdostring == 'Infrastructure': # id", "(if any link is missing we need to create that link) # self.ms_source", "print(stixdict) # id = '' # print('our type: ', type(SDOType)) # if sdostring", "fs.add() else: logging.error(f'Unexpected direction passed to get_rels: {direction}') return stix_loader.ms_source.query(fs) def get_connected_objs(stix_loader, obj,", "stix_loader) if ip is None: ip = IPv4Address(value=ip_addr) ret_objs.append(ip) infra = get_related_single(ip, multi_filt(type='infrastructure'),", "to have all custom properties in an extensions list ''' newList = stixdict.copy()", "# id = gen_uuid('infrastructure') # elif SDOType == 'Process': # id = gen_uuid('process')", "create that link) # self.ms_source = self.ms.source # self.ms_sink = self.ms.sink # stix_loader.ms_source", "''' Allows us to fix our dictionary every time we create STIX Objects", "return objs[0] def get_object_or_create(ip_addr, port, protocol, service, stix_loader): #need to go from ip", "obj, direction='in', obj_type=None): f = None l = [] if not obj_type is", "Filter('source_ref', 'contains', obj_type) elif direction == 'out': f = Filter('target_ref', 'contains', obj_type) rels", "returns created object (extensions:()) #TODO: dict.keys(startswith(x_)) key.add +'_inl' def 
def multi_filt(op='=', **kwargs):
    """Build a FilterSet with one Filter per keyword, all using operator ``op``.

    e.g. multi_filt(type='software', x_port=80) yields filters for
    type = 'software' AND x_port = 80.
    """
    filter_set = FilterSet()
    for prop, value in kwargs.items():
        # Defensive skip: 'op' is captured by the named parameter, so this
        # branch is never taken, but it guards against signature changes.
        if prop == 'op':
            continue
        filter_set.add(Filter(prop, op, value))
    return filter_set
Relationship(source_ref=infra, relationship_type='has', target_ref=software) # stix_loader.merge([software, rel]) ret_objs.extend([software, rel])", "and len(ip_obj) != 1: return None elif ip_obj is None: return None else:", "'out': f = Filter('source_ref', '=', obj.id) fs.add() else: logging.error(f'Unexpected direction passed to get_rels:", "infra connected to ip: # - get ip by value # - get", "x_port=port, x_protocol=protocol), stix_loader) if software is None: software = Software(name=f'{service if service else", "objs = get_connected_objs(stix_loader, obj, direction=direction, obj_type=obj_type) if len(objs) < 1: return None else:", "return getattr(obj, attr_string) elif hasattr(obj, 'extensions'): if attr_string in obj.extensions: return obj.extensions[attr_string] return", "object matched multiple objects! This could cause unexpected behavior!') return objs[0] def get_object_or_create(ip_addr,", "ip_obj is None: return None else: return get_connected_obj(stix_loader, ip_obj, direction='in', obj_type='infrastructure') def multi_filt(op='=',", "== list: for f in filters: fs.add(f) else: fs.add(filters) if direction == 'in':", "= {} for key, value in newList.items(): if key.startswith('x_'): addString = key +", "matched multiple objects! 
This could cause unexpected behavior!') return objs[0] def get_object_or_create(ip_addr, port,", "protocol, service def get_objects(filt, stix_loader): objs = stix_loader.ms_source.query(filt) if len(objs) > 0 :", "direction='in', obj_type='infrastructure') def multi_filt(op='=', **kwargs): fs = FilterSet() for key in kwargs: if", "not in stixdict.keys(): stixdict['allow_custom'] = True if 'spec_version' not in stixdict.keys(): stixdict['spec_version'] =", "objs is None: return None elif len(objs) == 1: return objs[0] elif len(objs)", "direction=direction, filters=f) for rel in rels: if direction == 'in': l.append(stix_loader.ms_source.get(rel.source_ref)) elif direction", "continue fs.add(Filter(key, op , kwargs[key])) return fs #TODO HELPER FUNCTION (param = class", "(extensions:()) #TODO: dict.keys(startswith(x_)) key.add +'_inl' def fix_stix(SDOType, stixdict, sdostring): ''' Allows us to", "rels = get_rels(stix_loader, obj, direction=direction, filters=f) for rel in rels: if direction ==", "= extensions # print(stixdict) # id = '' # print('our type: ', type(SDOType))", "link is missing we need to create that link) # self.ms_source = self.ms.source", "= FilterSet() if not filters is None: if type(filters) == list: for f", "== 'op': continue fs.add(Filter(key, op , kwargs[key])) return fs #TODO HELPER FUNCTION (param", "= Filter('target_ref', '=', obj.id) fs.add() elif direction == 'out': f = Filter('source_ref', '=',", "This could cause unexpected behavior!') return objs[0] def get_related_multi(obj, filt, stix_loader): objs =", "unexpected behavior!') return objs[0] def get_object_or_create(ip_addr, port, protocol, service, stix_loader): #need to go", "f = Filter('target_ref', '=', obj.id) fs.add() elif direction == 'out': f = Filter('source_ref',", "= stix_loader.ms.related_to(obj, filters=filt) if len(objs) > 0: return objs else: return None def", "stixdict['allow_custom'] = True if 'spec_version' not in stixdict.keys(): stixdict['spec_version'] = '2.1' s 
=", "fix_stix(SDOType, stixdict, sdostring): ''' Allows us to fix our dictionary every time we", "stixdict.keys(): stixdict['id'] = gen_uuid(sdostring) if 'allow_custom' not in stixdict.keys(): stixdict['allow_custom'] = True if", "(software, ret_objs) def get_stix_attr(obj, attr_string): if hasattr(obj, attr_string): return getattr(obj, attr_string) elif hasattr(obj,", "multi_filt(type='software', x_port=port, x_protocol=protocol), stix_loader) if software is None: software = Software(name=f'{service if service", "if software is None: software = Software(name=f'{service if service else f\"{port}/{protocol}\"} Server', x_port=port,", "stix_loader.ms_source ret_objs = [] ip = get_object(multi_filt(type='ipv4-addr', value=ip_addr), stix_loader) if ip is None:", "from stix2 import Software, Process, IPv4Address, Infrastructure, Relationship, CustomExtension, properties import logging import", "elif SDOType == 'Process': # id = gen_uuid('process') # else: # print('SDO TYPE", "stix_loader.merge([software, rel]) ret_objs.extend([software, rel]) # ret_objs.extend([software, rel]) return (software, ret_objs) def get_stix_attr(obj, attr_string):", "get_related_multi(obj, filt, stix_loader) if objs is None: return None elif len(objs) == 1:", "in rels: if direction == 'in': l.append(stix_loader.ms_source.get(rel.source_ref)) elif direction == 'out': l.append(stix_loader.ms_source.get(rel.target_ref)) return", "ret_objs = [] ip = get_object(multi_filt(type='ipv4-addr', value=ip_addr), stix_loader) if ip is None: ip", "ip: # - get ip by value # - get connected by type", "else: return None def get_object(filt, stix_loader): objs = get_objects(filt, stix_loader) if objs is", "obj, direction=direction, obj_type=obj_type) if len(objs) < 1: return None else: return objs[0] def", "objs else: return None def get_related_single(obj, filt, stix_loader): objs = get_related_multi(obj, filt, stix_loader)", "key in kwargs: if key == 'op': continue fs.add(Filter(key, op , kwargs[key])) 
return", "== 'out': f = Filter('source_ref', '=', obj.id) fs.add() else: logging.error(f'Unexpected direction passed to", "None else: return get_connected_obj(stix_loader, ip_obj, direction='in', obj_type='infrastructure') def multi_filt(op='=', **kwargs): fs = FilterSet()", "None: infra = Infrastructure(name=ip.value) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=ip) ret_objs.extend([infra, rel]) software =", "our dictionary every time we create STIX Objects to have all custom properties", "objs[0] def get_object_or_create(ip_addr, port, protocol, service, stix_loader): #need to go from ip ->", "gen_uuid('software') # elif sdostring == 'Infrastructure': # id = gen_uuid('infrastructure') # elif SDOType", "of str: {type(addString)}') extensions[addString] = value stixdict.pop(key) stixdict['extensions'] = extensions # print(stixdict) #", "unexpected behavior!') return objs[0] def get_related_multi(obj, filt, stix_loader): objs = stix_loader.ms.related_to(obj, filters=filt) if", "f = Filter('target_ref', 'contains', obj_type) rels = get_rels(stix_loader, obj, direction=direction, filters=f) for rel", "None: return None else: return get_connected_obj(stix_loader, ip_obj, direction='in', obj_type='infrastructure') def multi_filt(op='=', **kwargs): fs", "addString = key + '_inl' logging.debug(f'type of str: {type(addString)}') extensions[addString] = value stixdict.pop(key)", "obj, direction=direction, filters=f) for rel in rels: if direction == 'in': l.append(stix_loader.ms_source.get(rel.source_ref)) elif", "for key in kwargs: if key == 'op': continue fs.add(Filter(key, op , kwargs[key]))", "class (software, process,etc), dictionary) #TODO: returns created object (extensions:()) #TODO: dict.keys(startswith(x_)) key.add +'_inl'", "= get_object(multi_filt(type='ipv4-addr', value=ip_addr), stix_loader) if ip is None: ip = IPv4Address(value=ip_addr) ret_objs.append(ip) infra", "#Get infra connected to ip: # - get ip by value # -", "rel]) return 
(software, ret_objs) def get_stix_attr(obj, attr_string): if hasattr(obj, attr_string): return getattr(obj, attr_string)", "STIX Objects to have all custom properties in an extensions list ''' newList", "#TODO HELPER FUNCTION (param = class (software, process,etc), dictionary) #TODO: returns created object", "extensions list ''' newList = stixdict.copy() extensions = {} for key, value in", "self.ms.source # self.ms_sink = self.ms.sink # stix_loader.ms_source ret_objs = [] ip = get_object(multi_filt(type='ipv4-addr',", "target_ref=ip) ret_objs.extend([infra, rel]) software = get_related_single(ip, multi_filt(type='software', x_port=port, x_protocol=protocol), stix_loader) if software is", "ip): ip_obj = stix_loader.ms_source.query(query=Filter('value', '=', ip))[0] if type(ip_obj) == list and len(ip_obj) !=", "obj_type=None): objs = get_connected_objs(stix_loader, obj, direction=direction, obj_type=obj_type) if len(objs) < 1: return None", "software is None: software = Software(name=f'{service if service else f\"{port}/{protocol}\"} Server', x_port=port, x_protocol=protocol,", "uuid4 from stix2.datastore import Filter, FilterSet from stix2 import Software, Process, IPv4Address, Infrastructure,", "elif direction == 'out': l.append(stix_loader.ms_source.get(rel.target_ref)) return l def get_connected_obj(stix_loader, obj, direction='in', obj_type=None): objs", "def get_connected_obj(stix_loader, obj, direction='in', obj_type=None): objs = get_connected_objs(stix_loader, obj, direction=direction, obj_type=obj_type) if len(objs)", "id = gen_uuid('software') # elif sdostring == 'Infrastructure': # id = gen_uuid('infrastructure') #", "def get_objects(filt, stix_loader): objs = stix_loader.ms_source.query(filt) if len(objs) > 0 : return objs", "if type(filters) == list: for f in filters: fs.add(f) else: fs.add(filters) if direction", "by type, port, protocol, service def get_objects(filt, stix_loader): objs = stix_loader.ms_source.query(filt) if len(objs)", "# self.ms_sink 
= self.ms.sink # stix_loader.ms_source ret_objs = [] ip = get_object(multi_filt(type='ipv4-addr', value=ip_addr),", "Relationship(source_ref=infra, relationship_type='has', target_ref=ip) ret_objs.extend([infra, rel]) software = get_related_single(ip, multi_filt(type='software', x_port=port, x_protocol=protocol), stix_loader) if", "get connected by type (infra) # - get connected by type, port, protocol,", "properties in an extensions list ''' newList = stixdict.copy() extensions = {} for", "software (if any link is missing we need to create that link) #", "id = gen_uuid('process') # else: # print('SDO TYPE NOT INF/Software/PROCESS') # print(id) if", "0: return objs else: return None def get_related_single(obj, filt, stix_loader): objs = get_related_multi(obj,", "'=', obj.id) fs.add() else: logging.error(f'Unexpected direction passed to get_rels: {direction}') return stix_loader.ms_source.query(fs) def", "list ''' newList = stixdict.copy() extensions = {} for key, value in newList.items():", "# elif SDOType == 'Process': # id = gen_uuid('process') # else: # print('SDO", "value in newList.items(): if key.startswith('x_'): addString = key + '_inl' logging.debug(f'type of str:", "target_ref=software) # stix_loader.merge([software, rel]) ret_objs.extend([software, rel]) # ret_objs.extend([software, rel]) return (software, ret_objs) def", "object (extensions:()) #TODO: dict.keys(startswith(x_)) key.add +'_inl' def fix_stix(SDOType, stixdict, sdostring): ''' Allows us", "x_port=port, x_protocol=protocol, x_service=service, allow_custom=True, id=gen_uuid('software')) rel = Relationship(source_ref=infra, relationship_type='has', target_ref=software) # stix_loader.merge([software, rel])", "Filter('source_ref', '=', obj.id) fs.add() else: logging.error(f'Unexpected direction passed to get_rels: {direction}') return stix_loader.ms_source.query(fs)", "None: ip = IPv4Address(value=ip_addr) ret_objs.append(ip) infra = get_related_single(ip, multi_filt(type='infrastructure'), 
stix_loader) print(f'get_related_single_infra: {infra}') if", "= [] if not obj_type is None: if direction == 'in': f =", "return f'{string}--{uuid4()}' def get_rels(stix_loader, obj, direction='in', filters=None): fs = FilterSet() if not filters", "None elif ip_obj is None: return None else: return get_connected_obj(stix_loader, ip_obj, direction='in', obj_type='infrastructure')", "obj_type='infrastructure') def multi_filt(op='=', **kwargs): fs = FilterSet() for key in kwargs: if key", "objs = stix_loader.ms_source.query(filt) if len(objs) > 0 : return objs else: return None", "return objs else: return None def get_object(filt, stix_loader): objs = get_objects(filt, stix_loader) if", "objs = get_objects(filt, stix_loader) if objs is None: return None elif len(objs) ==", "== 'in': l.append(stix_loader.ms_source.get(rel.source_ref)) elif direction == 'out': l.append(stix_loader.ms_source.get(rel.target_ref)) return l def get_connected_obj(stix_loader, obj,", "else: return get_connected_obj(stix_loader, ip_obj, direction='in', obj_type='infrastructure') def multi_filt(op='=', **kwargs): fs = FilterSet() for", "l def get_connected_obj(stix_loader, obj, direction='in', obj_type=None): objs = get_connected_objs(stix_loader, obj, direction=direction, obj_type=obj_type) if", "(software, process,etc), dictionary) #TODO: returns created object (extensions:()) #TODO: dict.keys(startswith(x_)) key.add +'_inl' def" ]
[ "if item%2==0 else True pl = Pipeline() # Adds function into pipeline pl.addDataPipe(onlyOdd)", "os import sys sys.path.append(os.path.join(sys.path[0], '../')) from smart_pipeline import Pipeline data = [1,2,3,4,5] #", "pl = Pipeline() # Adds function into pipeline pl.addDataPipe(onlyOdd) res = pl(data) for", "onlyOdd(item): return False if item%2==0 else True pl = Pipeline() # Adds function", "False if item%2==0 else True pl = Pipeline() # Adds function into pipeline", "return False if item%2==0 else True pl = Pipeline() # Adds function into", "# Define a data function def onlyOdd(item): return False if item%2==0 else True", "else True pl = Pipeline() # Adds function into pipeline pl.addDataPipe(onlyOdd) res =", "# Adds function into pipeline pl.addDataPipe(onlyOdd) res = pl(data) for item in res:", "sys.path.append(os.path.join(sys.path[0], '../')) from smart_pipeline import Pipeline data = [1,2,3,4,5] # Define a data", "Pipeline data = [1,2,3,4,5] # Define a data function def onlyOdd(item): return False", "= [1,2,3,4,5] # Define a data function def onlyOdd(item): return False if item%2==0", "data function def onlyOdd(item): return False if item%2==0 else True pl = Pipeline()", "sys sys.path.append(os.path.join(sys.path[0], '../')) from smart_pipeline import Pipeline data = [1,2,3,4,5] # Define a", "data = [1,2,3,4,5] # Define a data function def onlyOdd(item): return False if", "True pl = Pipeline() # Adds function into pipeline pl.addDataPipe(onlyOdd) res = pl(data)", "smart_pipeline import Pipeline data = [1,2,3,4,5] # Define a data function def onlyOdd(item):", "item%2==0 else True pl = Pipeline() # Adds function into pipeline pl.addDataPipe(onlyOdd) res", "Adds function into pipeline pl.addDataPipe(onlyOdd) res = pl(data) for item in res: print(item)", "import os import sys sys.path.append(os.path.join(sys.path[0], '../')) from smart_pipeline import Pipeline data = [1,2,3,4,5]", "= Pipeline() # Adds function into pipeline pl.addDataPipe(onlyOdd) res = 
pl(data) for item", "'../')) from smart_pipeline import Pipeline data = [1,2,3,4,5] # Define a data function", "import sys sys.path.append(os.path.join(sys.path[0], '../')) from smart_pipeline import Pipeline data = [1,2,3,4,5] # Define", "a data function def onlyOdd(item): return False if item%2==0 else True pl =", "Define a data function def onlyOdd(item): return False if item%2==0 else True pl", "import Pipeline data = [1,2,3,4,5] # Define a data function def onlyOdd(item): return", "from smart_pipeline import Pipeline data = [1,2,3,4,5] # Define a data function def", "def onlyOdd(item): return False if item%2==0 else True pl = Pipeline() # Adds", "function def onlyOdd(item): return False if item%2==0 else True pl = Pipeline() #", "[1,2,3,4,5] # Define a data function def onlyOdd(item): return False if item%2==0 else", "Pipeline() # Adds function into pipeline pl.addDataPipe(onlyOdd) res = pl(data) for item in" ]
[ "features[0] # for one_feature in features: # # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag", "split_tag = tf.string_split(one_feature, \"|\") one_sparse = tf.SparseTensor( indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping", "in range(feature_num): # one_feature = X[i][j] # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag =", "# exp_X = tf.expand_dims(X,axis=-1) # example_list = tf.unstack(exp_X,axis = 0) # for one_example", "one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb pdb.set_trace() # res_seq", "[csv1,csv2,csv3,csv4] X = tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32", "# split_tag = tf.string_split(one_feature, \"|\") # one_sparse = tf.SparseTensor( # indices=split_tag.indices, # values=", "one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) # #[[ 0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X =", "= features[0] # for one_feature in features: # # one_feature = tf.reshape(one_feature,shape=[1]) #", "print(tf.shape(res)) import pdb pdb.set_trace() # res_seq = tf.squeeze(res,squeeze_dims=[-1]) with tf.Session() as sess: sess.run(tf.global_variables_initializer())", "= tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673, -0.00741819]] def my_function(one_example): features = tf.unstack(one_example,axis = 0)", "tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb pdb.set_trace() # res_seq = tf.squeeze(res,squeeze_dims=[-1]) with tf.Session() as sess:", "current_mapping = {'zhengquan_test': one_sparse} one_feature_embedding_res = 
tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1)", "in features: # # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\") #", "features: split_tag = tf.string_split(one_feature, \"|\") one_sparse = tf.SparseTensor( indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape )", "for one_feature in features: split_tag = tf.string_split(one_feature, \"|\") one_sparse = tf.SparseTensor( indices=split_tag.indices, values=", "import tensorflow as tf batch_size = 4 feature_num = 3 csv1 = [", "= {'zhengquan_test': one_sparse} one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1) res", "# one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\") # one_sparse = tf.SparseTensor(", "features = tf.unstack(one_example,axis = 0) # feature = features[0] # for one_feature in", "0.25915673, -0.00741819]] def my_function(one_example): features = tf.unstack(one_example,axis = 0) for one_feature in features:", "\"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4] X = tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\",", "for i in range(batch_size): # for j in range(feature_num): # one_feature = X[i][j]", "0) # feature = features[0] # for one_feature in features: # # one_feature", "in example_list: # features = tf.unstack(one_example,axis = 0) # feature = features[0] #", "tf.SparseTensor( indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping = {'zhengquan_test': one_sparse} one_feature_embedding_res = tf.feature_column.input_layer(current_mapping,", "-0.16549297]] 
#用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X = tf.expand_dims(X,axis=-1) # example_list = tf.unstack(exp_X,axis = 0) #", "split_tag.values, # dense_shape=split_tag.dense_shape # ) # # current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res", "\"paul|towns\", ] csv2 = [ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv3 = [ \"harden|james|curry\",", "# feature = features[0] # for one_feature in features: # # one_feature =", "as tf batch_size = 4 feature_num = 3 csv1 = [ \"harden|james|curry\", \"wrestbrook|harden|durant\",", "feature = features[0] # for one_feature in features: # # one_feature = tf.reshape(one_feature,shape=[1])", "current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673, -0.00741819]] def", "= tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32 ) res", "\"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4] X = tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10,", "# example_list = tf.unstack(exp_X,axis = 0) # for one_example in example_list: # features", "= tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import", "[ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv2 = [ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv3", "\"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4] X = 
tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\",", "pdb.set_trace() # res_seq = tf.squeeze(res,squeeze_dims=[-1]) with tf.Session() as sess: sess.run(tf.global_variables_initializer()) sess_res = sess.run([res],feed_dict={X:csv_s})", "tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for i in range(batch_size): # for", "tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb pdb.set_trace() # res_seq = tf.squeeze(res,squeeze_dims=[-1]) with", "] csv4 = [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4] X = tf.placeholder(shape=[None,feature_num],dtype=tf.string)", "tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32 ) res = tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer,", "pdb pdb.set_trace() # res_seq = tf.squeeze(res,squeeze_dims=[-1]) with tf.Session() as sess: sess.run(tf.global_variables_initializer()) sess_res =", "= tf.string_split(one_feature, \"|\") one_sparse = tf.SparseTensor( indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping =", "3 csv1 = [ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv2 = [ \"curry\", \"wrestbrook|harden|durant\",", "# one_sparse = tf.SparseTensor( # indices=split_tag.indices, # values= split_tag.values, # dense_shape=split_tag.dense_shape # )", "#用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X = tf.expand_dims(X,axis=-1) # example_list = tf.unstack(exp_X,axis = 0) # for", "tf.string_split(one_feature, \"|\") one_sparse = tf.SparseTensor( 
indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping = {'zhengquan_test':", "res) # #[[ 0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X = tf.expand_dims(X,axis=-1) # example_list", "tf.unstack(one_example,axis = 0) # feature = features[0] # for one_feature in features: #", "# for one_example in example_list: # features = tf.unstack(one_example,axis = 0) # feature", "# for one_feature in features: # # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag =", "# for j in range(feature_num): # one_feature = X[i][j] # one_feature = tf.reshape(one_feature,shape=[1])", "one_feature in features: split_tag = tf.string_split(one_feature, \"|\") one_sparse = tf.SparseTensor( indices=split_tag.indices, values= split_tag.values,", "\"harden|james|curry\", \"durant\", \"paul|towns\", ] csv4 = [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4]", "# # current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673,", "exp_X = tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb pdb.set_trace() # res_seq =", "combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for i in range(batch_size): # for j in range(feature_num):", "tf batch_size = 4 feature_num = 3 csv1 = [ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\",", "res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb pdb.set_trace() # res_seq = tf.squeeze(res,squeeze_dims=[-1]) with tf.Session()", "#[[ 0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X = tf.expand_dims(X,axis=-1) # example_list = tf.unstack(exp_X,axis", 
"\"paul|towns\", ] csv4 = [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4] X =", "tf.expand_dims(X,axis=-1) # example_list = tf.unstack(exp_X,axis = 0) # for one_example in example_list: #", "{'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673, -0.00741819]] def my_function(one_example): features", "= 0) for one_feature in features: split_tag = tf.string_split(one_feature, \"|\") one_sparse = tf.SparseTensor(", "= 4 feature_num = 3 csv1 = [ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv2", "one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673, -0.00741819]] def my_function(one_example): features = tf.unstack(one_example,axis =", "i in range(batch_size): # for j in range(feature_num): # one_feature = X[i][j] #", "-0.00741819]] def my_function(one_example): features = tf.unstack(one_example,axis = 0) for one_feature in features: split_tag", "split_tag = tf.string_split(one_feature, \"|\") # one_sparse = tf.SparseTensor( # indices=split_tag.indices, # values= split_tag.values,", "tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\") # one_sparse = tf.SparseTensor( # indices=split_tag.indices, #", "dtype=tf.string # dtype=tf.dtypes.int32 ) res = tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 #", "#除了有下面这种方法还有tf.unstack的方法 # for i in range(batch_size): # for j in range(feature_num): # one_feature", "tf.string_split(one_feature, \"|\") # one_sparse = tf.SparseTensor( # indices=split_tag.indices, # values= split_tag.values, # dense_shape=split_tag.dense_shape", "csv4 = [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4] X = 
tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature", "= tf.string_split(one_feature, \"|\") # one_sparse = tf.SparseTensor( # indices=split_tag.indices, # values= split_tag.values, #", "X = tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32 )", "= tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb pdb.set_trace() # res_seq = tf.squeeze(res,squeeze_dims=[-1]) with tf.Session() as", "j in range(feature_num): # one_feature = X[i][j] # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag", "4 feature_num = 3 csv1 = [ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv2 =", "one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) # #[[ 0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 #", "= tf.feature_column.input_layer(current_mapping, res) # #[[ 0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X = tf.expand_dims(X,axis=-1)", "one_sparse = tf.SparseTensor( # indices=split_tag.indices, # values= split_tag.values, # dense_shape=split_tag.dense_shape # ) #", "{'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) # #[[ 0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析", ") current_mapping = {'zhengquan_test': one_sparse} one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res exp_X =", "res) #[[-0.10367388, 0.25915673, -0.00741819]] def my_function(one_example): features = tf.unstack(one_example,axis = 0) for one_feature", "= 0) # for one_example in 
example_list: # features = tf.unstack(one_example,axis = 0)", "combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32 ) res = tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法", "X[i][j] # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\") # one_sparse =", "split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping = {'zhengquan_test': one_sparse} one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res", "= [ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv2 = [ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ]", "# res_seq = tf.squeeze(res,squeeze_dims=[-1]) with tf.Session() as sess: sess.run(tf.global_variables_initializer()) sess_res = sess.run([res],feed_dict={X:csv_s}) print(type(sess_res))", "one_feature = X[i][j] # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\") #", "= 3 csv1 = [ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv2 = [ \"curry\",", "= tf.unstack(exp_X,axis = 0) # for one_example in example_list: # features = tf.unstack(one_example,axis", "] csv2 = [ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv3 = [ \"harden|james|curry\", \"durant\",", "dense_shape=split_tag.dense_shape ) current_mapping = {'zhengquan_test': one_sparse} one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res exp_X", ") # # current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388,", "# current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673, -0.00741819]]", "example_list = tf.unstack(exp_X,axis = 0) # for 
one_example in example_list: # features =", "one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res))", "# #[[ 0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X = tf.expand_dims(X,axis=-1) # example_list =", "tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32 ) res =", "\"wrestbrook|harden|durant\", \"paul|towns\", ] csv2 = [ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv3 = [", "# one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) # #[[ 0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X", "\"paul|towns\", ] csv3 = [ \"harden|james|curry\", \"durant\", \"paul|towns\", ] csv4 = [ \"wrestbrook|harden|durant\",", "csv_s= [csv1,csv2,csv3,csv4] X = tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string #", "# for i in range(batch_size): # for j in range(feature_num): # one_feature =", "dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for i in range(batch_size): # for j in range(feature_num): #", "one_sparse} one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32)", "feature_num = 3 csv1 = [ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv2 = [", "tf.SparseTensor( # indices=split_tag.indices, # values= 
split_tag.values, # dense_shape=split_tag.dense_shape # ) # # current_mapping", "column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32 ) res = tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\",", "tf.feature_column.input_layer(current_mapping, res) # #[[ 0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X = tf.expand_dims(X,axis=-1) #", "# values= split_tag.values, # dense_shape=split_tag.dense_shape # ) # # current_mapping = {'zhengquan_test': one_sparse}", "for j in range(feature_num): # one_feature = X[i][j] # one_feature = tf.reshape(one_feature,shape=[1]) #", "features = tf.unstack(one_example,axis = 0) for one_feature in features: split_tag = tf.string_split(one_feature, \"|\")", "0.08187684, 0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X = tf.expand_dims(X,axis=-1) # example_list = tf.unstack(exp_X,axis =", "res_seq = tf.squeeze(res,squeeze_dims=[-1]) with tf.Session() as sess: sess.run(tf.global_variables_initializer()) sess_res = sess.run([res],feed_dict={X:csv_s}) print(type(sess_res)) print(sess_res)", "\"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv2 = [ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv3 =", "= [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4] X = tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature =", "one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32 ) res = tf.contrib.layers.embedding_column(one_feature,", "one_sparse = tf.SparseTensor( indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping = 
{'zhengquan_test': one_sparse} one_feature_embedding_res", "= tf.SparseTensor( indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping = {'zhengquan_test': one_sparse} one_feature_embedding_res =", "res = tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for i in range(batch_size):", "range(batch_size): # for j in range(feature_num): # one_feature = X[i][j] # one_feature =", "= X[i][j] # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\") # one_sparse", "[ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4] X = tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket(", "indices=split_tag.indices, # values= split_tag.values, # dense_shape=split_tag.dense_shape # ) # # current_mapping = {'zhengquan_test':", "= tf.SparseTensor( # indices=split_tag.indices, # values= split_tag.values, # dense_shape=split_tag.dense_shape # ) # #", "in features: split_tag = tf.string_split(one_feature, \"|\") one_sparse = tf.SparseTensor( indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape", "= {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673, -0.00741819]] def my_function(one_example):", "current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) # #[[ 0.08187684, 0.22063671,", "# one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673, -0.00741819]] def my_function(one_example): features = tf.unstack(one_example,axis", "def my_function(one_example): features = tf.unstack(one_example,axis = 0) for one_feature in features: split_tag 
=", "tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb", "for one_feature in features: # # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature,", "] csv3 = [ \"harden|james|curry\", \"durant\", \"paul|towns\", ] csv4 = [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\",", "for one_example in example_list: # features = tf.unstack(one_example,axis = 0) # feature =", "\"wrestbrook|harden|durant\", \"paul|towns\", ] csv3 = [ \"harden|james|curry\", \"durant\", \"paul|towns\", ] csv4 = [", "= [ \"harden|james|curry\", \"durant\", \"paul|towns\", ] csv4 = [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ]", "0.22063671, -0.16549297]] #用unstack证明也是可行的,但是placeholder的第一个dimension不能是None,需要是一个确切的数值,不然unstack函数不能解析 # exp_X = tf.expand_dims(X,axis=-1) # example_list = tf.unstack(exp_X,axis = 0)", "= tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32 ) res = tf.contrib.layers.embedding_column(one_feature, #", "# features = tf.unstack(one_example,axis = 0) # feature = features[0] # for one_feature", "batch_size = 4 feature_num = 3 csv1 = [ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ]", "csv1 = [ \"harden|james|curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv2 = [ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\",", "import pdb pdb.set_trace() # res_seq = tf.squeeze(res,squeeze_dims=[-1]) with tf.Session() as sess: sess.run(tf.global_variables_initializer()) sess_res", "# dtype=tf.dtypes.int32 ) res = tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for", "# initializer=my_initializer, 
combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for i in range(batch_size): # for j", "= tf.unstack(one_example,axis = 0) for one_feature in features: split_tag = tf.string_split(one_feature, \"|\") one_sparse", "csv3 = [ \"harden|james|curry\", \"durant\", \"paul|towns\", ] csv4 = [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\"", "my_function(one_example): features = tf.unstack(one_example,axis = 0) for one_feature in features: split_tag = tf.string_split(one_feature,", "\"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv3 = [ \"harden|james|curry\", \"durant\", \"paul|towns\", ] csv4 =", "return one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb pdb.set_trace() #", "\"|\") one_sparse = tf.SparseTensor( indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping = {'zhengquan_test': one_sparse}", "one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673, -0.00741819]] def my_function(one_example): features =", "exp_X = tf.expand_dims(X,axis=-1) # example_list = tf.unstack(exp_X,axis = 0) # for one_example in", "] csv_s= [csv1,csv2,csv3,csv4] X = tf.placeholder(shape=[None,feature_num],dtype=tf.string) one_feature = tf.contrib.layers.sparse_column_with_hash_bucket( column_name=\"zhengquan_test\", hash_bucket_size=10, combiner=\"sum\", dtype=tf.string", "indices=split_tag.indices, values= split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping = {'zhengquan_test': one_sparse} one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res)", ") # # current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) #", "one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = 
tf.string_split(one_feature, \"|\") # one_sparse = tf.SparseTensor( #", "csv2 = [ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv3 = [ \"harden|james|curry\", \"durant\", \"paul|towns\",", "initializer=my_initializer, combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for i in range(batch_size): # for j in", "dense_shape=split_tag.dense_shape # ) # # current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping,", "# ) # # current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res)", "0) # for one_example in example_list: # features = tf.unstack(one_example,axis = 0) #", "[ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv3 = [ \"harden|james|curry\", \"durant\", \"paul|towns\", ] csv4", "values= split_tag.values, # dense_shape=split_tag.dense_shape # ) # # current_mapping = {'zhengquan_test': one_sparse} #", "res) return one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb pdb.set_trace()", "= 0) # feature = features[0] # for one_feature in features: # #", "= {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) # #[[ 0.08187684, 0.22063671, -0.16549297]]", ") res = tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for i in", "range(feature_num): # one_feature = X[i][j] # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature,", "# one_feature = X[i][j] # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\")", "# current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) # #[[ 0.08187684,", "example_list: 
# features = tf.unstack(one_example,axis = 0) # feature = features[0] # for", "dtype=tf.dtypes.int32 ) res = tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for i", "one_example in example_list: # features = tf.unstack(one_example,axis = 0) # feature = features[0]", "# dense_shape=split_tag.dense_shape # ) # # current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res =", "{'zhengquan_test': one_sparse} one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) return one_feature_embedding_res exp_X = tf.expand_dims(X,axis=-1) res =", "#[[-0.10367388, 0.25915673, -0.00741819]] def my_function(one_example): features = tf.unstack(one_example,axis = 0) for one_feature in", "tf.unstack(one_example,axis = 0) for one_feature in features: split_tag = tf.string_split(one_feature, \"|\") one_sparse =", "# # current_mapping = {'zhengquan_test': one_sparse} # one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) # #[[", "= tf.expand_dims(X,axis=-1) # example_list = tf.unstack(exp_X,axis = 0) # for one_example in example_list:", "tf.unstack(exp_X,axis = 0) # for one_example in example_list: # features = tf.unstack(one_example,axis =", "= tf.unstack(one_example,axis = 0) # feature = features[0] # for one_feature in features:", "one_feature in features: # # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\")", "features: # # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\") # one_sparse", "= tf.expand_dims(X,axis=-1) res = tf.map_fn(fn=my_function,elems=exp_X,dtype=tf.float32) print(tf.shape(res)) import pdb pdb.set_trace() # res_seq = tf.squeeze(res,squeeze_dims=[-1])", "in range(batch_size): # for j in range(feature_num): # one_feature = X[i][j] # one_feature", "tensorflow as tf batch_size = 4 feature_num = 3 csv1 = [ 
\"harden|james|curry\",", "# indices=split_tag.indices, # values= split_tag.values, # dense_shape=split_tag.dense_shape # ) # # current_mapping =", "[ \"harden|james|curry\", \"durant\", \"paul|towns\", ] csv4 = [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s=", "hash_bucket_size=10, combiner=\"sum\", dtype=tf.string # dtype=tf.dtypes.int32 ) res = tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\", dimension=3)", "= [ \"curry\", \"wrestbrook|harden|durant\", \"paul|towns\", ] csv3 = [ \"harden|james|curry\", \"durant\", \"paul|towns\", ]", "\"|\") # one_sparse = tf.SparseTensor( # indices=split_tag.indices, # values= split_tag.values, # dense_shape=split_tag.dense_shape #", "# # one_feature = tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\") # one_sparse =", "tf.feature_column.input_layer(current_mapping, res) #[[-0.10367388, 0.25915673, -0.00741819]] def my_function(one_example): features = tf.unstack(one_example,axis = 0) for", "0) for one_feature in features: split_tag = tf.string_split(one_feature, \"|\") one_sparse = tf.SparseTensor( indices=split_tag.indices,", "values= split_tag.values, dense_shape=split_tag.dense_shape ) current_mapping = {'zhengquan_test': one_sparse} one_feature_embedding_res = tf.feature_column.input_layer(current_mapping, res) return", "= tf.contrib.layers.embedding_column(one_feature, # initializer=my_initializer, combiner=\"mean\", dimension=3) #除了有下面这种方法还有tf.unstack的方法 # for i in range(batch_size): #", "= tf.reshape(one_feature,shape=[1]) # split_tag = tf.string_split(one_feature, \"|\") # one_sparse = tf.SparseTensor( # indices=split_tag.indices,", "\"durant\", \"paul|towns\", ] csv4 = [ \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\", \"wrestbrook|harden|durant\" ] csv_s= [csv1,csv2,csv3,csv4] X" ]
[ "models class Migration(migrations.Migration): dependencies = [ ('products', '0007_product_detail'), ] operations = [ migrations.RemoveField(", "on 2020-10-19 11:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "operations = [ migrations.RemoveField( model_name='product', name='images', ), migrations.AddField( model_name='product', name='image', field=models.FileField(blank=True, upload_to='Products'), ),", "('products', '0007_product_detail'), ] operations = [ migrations.RemoveField( model_name='product', name='images', ), migrations.AddField( model_name='product', name='image',", "by Django 3.1.2 on 2020-10-19 11:55 from django.db import migrations, models class Migration(migrations.Migration):", "Django 3.1.2 on 2020-10-19 11:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "11:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('products', '0007_product_detail'),", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('products', '0007_product_detail'), ] operations =", "dependencies = [ ('products', '0007_product_detail'), ] operations = [ migrations.RemoveField( model_name='product', name='images', ),", "3.1.2 on 2020-10-19 11:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "class Migration(migrations.Migration): dependencies = [ ('products', '0007_product_detail'), ] operations = [ migrations.RemoveField( model_name='product',", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('products', '0007_product_detail'), ] operations", "] operations = [ migrations.RemoveField( model_name='product', name='images', ), migrations.AddField( model_name='product', name='image', field=models.FileField(blank=True, upload_to='Products'),", "2020-10-19 11:55 from django.db import migrations, models 
class Migration(migrations.Migration): dependencies = [ ('products',", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('products', '0007_product_detail'), ]", "<reponame>akshaynot/farmedorganic # Generated by Django 3.1.2 on 2020-10-19 11:55 from django.db import migrations,", "'0007_product_detail'), ] operations = [ migrations.RemoveField( model_name='product', name='images', ), migrations.AddField( model_name='product', name='image', field=models.FileField(blank=True,", "= [ ('products', '0007_product_detail'), ] operations = [ migrations.RemoveField( model_name='product', name='images', ), migrations.AddField(", "migrations, models class Migration(migrations.Migration): dependencies = [ ('products', '0007_product_detail'), ] operations = [", "[ ('products', '0007_product_detail'), ] operations = [ migrations.RemoveField( model_name='product', name='images', ), migrations.AddField( model_name='product',", "Generated by Django 3.1.2 on 2020-10-19 11:55 from django.db import migrations, models class", "= [ migrations.RemoveField( model_name='product', name='images', ), migrations.AddField( model_name='product', name='image', field=models.FileField(blank=True, upload_to='Products'), ), ]", "# Generated by Django 3.1.2 on 2020-10-19 11:55 from django.db import migrations, models", "Migration(migrations.Migration): dependencies = [ ('products', '0007_product_detail'), ] operations = [ migrations.RemoveField( model_name='product', name='images'," ]
[ "bcipy.helpers.load import load_experimental_data from bcipy.helpers.triggers import trigger_decoder from bcipy.helpers.acquisition import ( analysis_channels, analysis_channel_names_by_pos)", "[TODO] We can load some of these from the session parameter files MODE", "data for exploration relative: whether this is a relative or absolute calculation of", "relative PSD output reverse: whether the level estimations should be descending (default; ie", "a relative or absolute calculation of PSD reverse: whether the level estimations should", "DELTA_SUB_1 = ('delta_sub_1', [3.20, 4.00]) # append desired psd defined above to the", "ie band increases with attention) or ascending step: how many stimuli between each", "band in PSD_TO_DETERMINE: stats_data = np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data, axis=0) exports[name]['stdev'] = np.std(stats_data,", "f'Raw Data Path: [{raw_data_path}] \\n') # process and get the data from csv", "power_spectral_density, PSD_TYPE) # BciPy Constants # [TODO] We can load some of these", "# [TODO] We can load some of these from the session parameter files", "data for futher processing. Return: Filtered data & sampling rate \"\"\" notch_filterted_data =", "MODE = 'calibration' TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv' #", "filtered_data, sampling_rate_post_filter def export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w') as feedback_file: writer = csv.writer( feedback_file,", "\"\"\"Create Sequence exports. Loops through segmented data and calculates the PSD sequence data.", "list.\"\"\" ALPHA = ('alpha', [8, 11.99]) ALPHA_SUB_1 = ('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2 =", "channel to use for PSD calculation plot: whether or not to plot the", "futher processing. Return: Filtered data & sampling rate \"\"\" notch_filterted_data = notch.notch_filter( raw_data,", "this flag to reverse that direction. 
' \\ 'Used to calculate appropriate cutoffs", "\\ 'Used to calculate appropriate cutoffs for feedback levels ') parser.add_argument('-export', '--export', required=False,", "data from csv raw_data, _, channels, type_amp, fs = read_data_csv(raw_data_path) # print helpful", "# calculate the fields of interest for export for name, band in PSD_TO_DETERMINE:", "import ( power_spectral_density, PSD_TYPE) # BciPy Constants # [TODO] We can load some", "[8, 11.99]) ALPHA_SUB_1 = ('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2 = ('alpha_sub_2', [11.5, 12.5]) BETA", "the psd explore function and print results with warnings.catch_warnings(): warnings.simplefilter('ignore') # explore! psd", "import trigger_decoder from bcipy.helpers.acquisition import ( analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import ( power_spectral_density,", "increases with attention. ' \\ 'Use this flag to reverse that direction. '", "offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data = create_sequence_exports( x, num_seq * 10, channel_index, TRIAL_LENGTH,", "('theta_sub_1', [3.00, 5.00]) DELTA = ('delta', [1, 3.99]) DELTA_SUB_1 = ('delta_sub_1', [3.20, 4.00])", "_, trigger_targetness, trigger_timing, offset = trigger_decoder( mode=MODE, trigger_path=trigger_path) # add a static offset", "for export for name, band in PSD_TO_DETERMINE: stats_data = np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data,", "data directory of interest.') # parse and define the command line arguments. 
args", "data_folder = load_experimental_data() channel_index = args.channel plot = args.plot relative_calculation = args.relative reverse", "plot raw data for the trial index given if plot: time = np.arange(", "band in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative)) #", "of these from the session parameter files MODE = 'calibration' TRIGGERS_FN = 'triggers.txt'", "reverse) # plot raw data for the trial index given if plot: time", "should be descending (default; ie band increases with attention) or ascending step: how", "command line arguments parser = argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel', default=6, type=int, help='channel Index", "bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter = fs / downsample_rate return filtered_data, sampling_rate_post_filter def export_data_to_csv(exports): with", "= 60 FILTER_HP = 2 FILTER_LP = 40 # Quantile Exports QUANTILES =", "THETA = ('theta', [4, 7.99]) THETA_SUB_1 = ('theta_sub_1', [3.00, 5.00]) DELTA = ('delta',", "notch_filterted_data = notch.notch_filter( raw_data, fs, notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP, fs,", "PSD reverse: whether the level estimations should be descending (default; ie band increases", "to use relative band calculation for PSD') parser.add_argument('-path', '--path', default=False, type=str, help='Path to", "tmp = [] # Calculate PSD for every sequence (called frame here) for", "PSD.') parser.add_argument('-channel', '--channel', default=6, type=int, help='channel Index to compute PSD') parser.add_argument('-plot', '--plot', default=False,", "('alpha_sub_2', [11.5, 12.5]) BETA = ('beta', [12, 25]) THETA = ('theta', [4, 7.99])", "export for name, band in PSD_TO_DETERMINE: stats_data = 
np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data, axis=0)", "whether this is a relative or absolute calculation of PSD reverse: whether the", "= np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data, axis=0) exports[name]['stdev'] = np.std(stats_data, axis=0) exports[name]['range'] = [", "helpful information to console print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH} \\n' f'Downsample rate: {DOWNSAMPLE_RATE} \\n'", "= np.mean(stats_data, axis=0) exports[name]['stdev'] = np.std(stats_data, axis=0) exports[name]['range'] = [ np.min(stats_data, axis=0), np.max(stats_data,", "line arguments parser = argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel', default=6, type=int, help='channel Index to", "segmented data and calculates the PSD sequence data. data: reshaped trial data ['first',", "plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports. Loops through segmented data and calculates", "exports exports = {} for name, band in PSD_TO_DETERMINE: exports[name] = {} exports[name]['data']", "default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not to use relative band", "data_folder, channel_index, plot=True, relative=False, reverse=False, export_to_csv=False): \"\"\"PSD Explore. 
This assumes use with VR300", "NOTCH_FREQ = 60 FILTER_HP = 2 FILTER_LP = 40 # Quantile Exports QUANTILES", "Filtered data & sampling rate \"\"\" notch_filterted_data = notch.notch_filter( raw_data, fs, notch_filter_freqency) bandpass_filtered_data", "be descending (default; ie band increases with attention) or ascending export_to_csv: whether or", "45, 70] # PSD Parameters \"\"\"Define bands here and add to PSD_TO_DETERMINE list.\"\"\"", "filtered_data, sampling_rate_post_filter = filter_data( raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ) # decode triggers and get", "taken from parameters from the session] * we want the PSD from the", "DOWNSAMPLE_RATE = 2 NOTCH_FREQ = 60 FILTER_HP = 2 FILTER_LP = 40 #", "if reverse: QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES) del exports[name]['data'] # calculate a raw", "parser.add_argument('-path', '--path', default=False, type=str, help='Path to BciPy data directory of interest.') parser.add_argument('-feedback_desc', '--feedback_desc',", "# add a static offset of 100 ms [TODO load from parameters] offset", "parameters] offset = offset + .1 # reshape the data x, y, num_seq,", "Deviation', 'Range [min max]', f'Quantiles {QUANTILES}']) # write PSD data for name, _", "in PSD_TO_DETERMINE: exports[name] = {} exports[name]['data'] = [] def psd_explore( data_folder, channel_index, plot=True,", "[12, 25]) THETA = ('theta', [4, 7.99]) THETA_SUB_1 = ('theta_sub_1', [3.00, 5.00]) DELTA", "to plot the filtered data and psd spectrum relative: whether or not to", "trial_reshaper from bcipy.helpers.load import load_experimental_data from bcipy.helpers.triggers import trigger_decoder from bcipy.helpers.acquisition import (", "band increases with attention) or ascending export_to_csv: whether or not to write output", "\\n' f'Using Device: {type_amp} - {fs} samples/sec \\n') # filter the data filtered_data,", "data folder with raw data and triggers channel_index: channel to use for PSD", "Exports QUANTILES 
= [15, 30, 45, 70] # PSD Parameters \"\"\"Define bands here", "exploration relative: whether this is a relative or absolute calculation of PSD reverse:", "desired psd defined above to the list to use PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1,", "procedure as AD supplement, filter and downsample the data for futher processing. Return:", "args.relative reverse = args.feedback_desc export_to_csv = args.export # ignore some pandas warnings, run", "csv raw_data, _, channels, type_amp, fs = read_data_csv(raw_data_path) # print helpful information to", "whether or not to write output to csv returns: average, standard deviation \"\"\"", "9.00]) ALPHA_SUB_2 = ('alpha_sub_2', [11.5, 12.5]) BETA = ('beta', [12, 25]) THETA =", "channels, type_amp, fs = read_data_csv(raw_data_path) # print helpful information to console print( 'DEVICE", "PSD') parser.add_argument('-path', '--path', default=False, type=str, help='Path to BciPy data directory of interest.') parser.add_argument('-feedback_desc',", "[ np.min(stats_data, axis=0), np.max(stats_data, axis=0) ] if reverse: QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES)", "= ('beta', [12, 25]) THETA = ('theta', [4, 7.99]) THETA_SUB_1 = ('theta_sub_1', [3.00,", "for name, _ in PSD_TO_DETERMINE: writer.writerow( [name, exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] ) if", "Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path: [{trigger_path}] \\n' f'Raw Data Path: [{raw_data_path}] \\n') #", "help='channel Index to compute PSD') parser.add_argument('-plot', '--plot', default=False, type=lambda x: (str(x).lower() == 'true'),", "Define necessary command line arguments parser = argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel', default=6, type=int,", "reverse that direction. 
' \\ 'Used to calculate appropriate cutoffs for feedback levels", "append desired psd defined above to the list to use PSD_TO_DETERMINE = [ALPHA,", "# process and get the data from csv raw_data, _, channels, type_amp, fs", "data. data: reshaped trial data ['first', 'second'] num_trials: total number of sequences in", "downsample_rate, notch_filter_freqency): \"\"\"Filter Data. Using the same procedure as AD supplement, filter and", "trial_length \"\"\" index = 0 frames = int(num_trials / step) tmp = []", "load some of these from the session parameter files MODE = 'calibration' TRIGGERS_FN", "NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE = 2 NOTCH_FREQ = 60 FILTER_HP = 2 FILTER_LP", "with VR300 for the AD Feedback experiment. data_folder: path to a BciPy data", "for name, band in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False,", "sampling_rate_post_filter, plot, relative, reverse) # plot raw data for the trial index given", "('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2 = ('alpha_sub_2', [11.5, 12.5]) BETA = ('beta', [12, 25])", "* 10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot, relative, reverse) # plot raw data for", "fs, notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP, fs, order=2) filtered_data = downsample.downsample(", "writer.writerow( ['', 'Average', 'Standard Deviation', 'Range [min max]', f'Quantiles {QUANTILES}']) # write PSD", "trigger_targetness, trigger_timing, offset = trigger_decoder( mode=MODE, trigger_path=trigger_path) # add a static offset of", "{QUANTILES}']) # write PSD data for name, _ in PSD_TO_DETERMINE: writer.writerow( [name, exports[name]['average'],", "and triggers channel_index: channel to use for PSD calculation plot: whether or not", "plot: whether or not to plot the data for exploration relative: whether 
this", "= notch.notch_filter( raw_data, fs, notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP, fs, order=2)", "the level estimations should be descending (default; ie band increases with attention) or", "import ( analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import ( power_spectral_density, PSD_TYPE) # BciPy Constants", "tmp.append(process_data) index += step for name, band in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data, band,", "the relevant data paths trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}' # print helpful", "for PSD calculation plot: whether or not to plot the filtered data and", "not data_folder: data_folder = load_experimental_data() channel_index = args.channel plot = args.plot relative_calculation =", "PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA, THETA_SUB_1, DELTA] # Initialize exports exports", "exports[name]['average'] = np.mean(stats_data, axis=0) exports[name]['stdev'] = np.std(stats_data, axis=0) exports[name]['range'] = [ np.min(stats_data, axis=0),", "channel_index, trial_length, sampling_rate, plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports. 
Loops through segmented", "12.5]) BETA = ('beta', [12, 25]) THETA = ('theta', [4, 7.99]) THETA_SUB_1 =", "np.arange( data.size) / sampling_rate_post_filter fig, ax = plt.subplots(1, 1, figsize=(12, 4)) plt.plot(time, data,", "relative band calculation for PSD') parser.add_argument('-path', '--path', default=False, type=str, help='Path to BciPy data", "trial index given if plot: time = np.arange( data.size) / sampling_rate_post_filter fig, ax", "PSD for every sequence (called frame here) for _ in range(frames): process_data =", "a BciPy data folder with raw data and triggers channel_index: channel to use", "for PSD') parser.add_argument('-path', '--path', default=False, type=str, help='Path to BciPy data directory of interest.')", "# write headers writer.writerow( ['', 'Average', 'Standard Deviation', 'Range [min max]', f'Quantiles {QUANTILES}'])", "'w') as feedback_file: writer = csv.writer( feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write headers", "average, [1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative) return average def filter_data(raw_data, fs,", "sequences in task (ie 50, 100) channel_index: channel we're interested in extracting trial_length:", "def export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w') as feedback_file: writer = csv.writer( feedback_file, delimiter=',', quotechar='\"',", "for exploration relative: whether this is a relative or absolute calculation of PSD", "= argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel', default=6, type=int, help='channel Index to compute PSD') parser.add_argument('-plot',", "args.channel plot = args.plot relative_calculation = args.relative reverse = args.feedback_desc export_to_csv = args.export", "first stimuli in trial to the trial_length \"\"\" index = 0 frames =", "parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda x: 
(str(x).lower() == 'true'), help='By default, PSD are assumed", "bcipy.helpers.triggers import trigger_decoder from bcipy.helpers.acquisition import ( analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import (", "pandas warnings, run the psd explore function and print results with warnings.catch_warnings(): warnings.simplefilter('ignore')", "axis=0) exports[name]['range'] = [ np.min(stats_data, axis=0), np.max(stats_data, axis=0) ] if reverse: QUANTILES.reverse() exports[name]['quantiles']", "'--plot', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not to plot raw", "1, figsize=(12, 4)) plt.plot(time, data, lw=1.5, color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw", "data_folder = args.path # Note: this doesn't work on Mac for some reason...", "f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}' # print helpful information to console print('CONFIGURATION:\\n' f'Trial length:", "information to console print( 'DEVICE INFO:' f'\\nChannels loaded: {channels}. \\n' f'Using channel: {channels[channel_index]}", "/ sampling_rate_post_filter fig, ax = plt.subplots(1, 1, figsize=(12, 4)) plt.plot(time, data, lw=1.5, color='k')", "+= step for name, band in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH,", "band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative)) # calculate the fields of interest for", "= 'calibration' TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv' # Parameters", "of interest.') # parse and define the command line arguments. 
args = parser.parse_args()", "f'Trigger Path: [{trigger_path}] \\n' f'Raw Data Path: [{raw_data_path}] \\n') # process and get", "and psd spectrum relative: whether or not to export relative PSD output reverse:", "'--feedback_desc', default=False, type=lambda x: (str(x).lower() == 'true'), help='By default, PSD are assumed desceding", "'calibration' TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv' # Parameters TRIAL_LENGTH", "csv returns: average, standard deviation \"\"\" # construct the relevant data paths trigger_path", "= args.feedback_desc export_to_csv = args.export # ignore some pandas warnings, run the psd", "raw_data_path = f'{data_folder}/{RAW_DATA_FN}' # print helpful information to console print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH}", "Sequence exports. Loops through segmented data and calculates the PSD sequence data. data:", "num_trials: total number of sequences in task (ie 50, 100) channel_index: channel we're", "data sampling rate of EEG plot: whether or not to plot the data", "calculates the PSD sequence data. 
data: reshaped trial data ['first', 'second'] num_trials: total", "{} for name, band in PSD_TO_DETERMINE: exports[name] = {} exports[name]['data'] = [] def", "bcipy.signal.process.filter import bandpass, notch, downsample from bcipy.helpers.task import trial_reshaper from bcipy.helpers.load import load_experimental_data", "deviation \"\"\" # construct the relevant data paths trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path =", "not to write output to csv returns: average, standard deviation \"\"\" # construct", "type=int, help='channel Index to compute PSD') parser.add_argument('-plot', '--plot', default=False, type=lambda x: (str(x).lower() ==", "delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write headers writer.writerow( ['', 'Average', 'Standard Deviation', 'Range [min", "average, standard deviation \"\"\" # construct the relevant data paths trigger_path = f'{data_folder}/{TRIGGERS_FN}'", "Parameters TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE = 2 NOTCH_FREQ = 60", "{DOWNSAMPLE_RATE} \\n' f'Notch Frequency: {NOTCH_FREQ} \\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path: [{trigger_path}]", "warnings from bcipy.helpers.load import read_data_csv from bcipy.signal.process.filter import bandpass, notch, downsample from bcipy.helpers.task", "trigger_timing, filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data = create_sequence_exports( x,", "/ downsample_rate return filtered_data, sampling_rate_post_filter def export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w') as feedback_file: writer", "to reverse that direction. 
' \\ 'Used to calculate appropriate cutoffs for feedback", "PSD data for name, _ in PSD_TO_DETERMINE: writer.writerow( [name, exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']]", "step) tmp = [] # Calculate PSD for every sequence (called frame here)", "add a static offset of 100 ms [TODO load from parameters] offset =", "ALPHA = ('alpha', [8, 11.99]) ALPHA_SUB_1 = ('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2 = ('alpha_sub_2',", "50, 100) channel_index: channel we're interested in extracting trial_length: length of reshaping sampling_rate:", "notch_filter_freqency): \"\"\"Filter Data. Using the same procedure as AD supplement, filter and downsample", "filtered data and psd spectrum relative: whether or not to export relative PSD", "trigger_timing, offset = trigger_decoder( mode=MODE, trigger_path=trigger_path) # add a static offset of 100", "not to plot the data for exploration relative: whether this is a relative", "# Define necessary command line arguments parser = argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel', default=6,", "{channels}. \\n' f'Using channel: {channels[channel_index]} \\n' f'Using Device: {type_amp} - {fs} samples/sec \\n')", "= ('theta', [4, 7.99]) THETA_SUB_1 = ('theta_sub_1', [3.00, 5.00]) DELTA = ('delta', [1,", "x: (str(x).lower() == 'true'), help='By default, PSD are assumed desceding in ' \\", "# explore! psd = psd_explore( data_folder, channel_index, plot=plot, relative=relative_calculation, reverse=reverse, export_to_csv=export_to_csv) print( 'RESULTS:\\n'", "data filtered_data, sampling_rate_post_filter = filter_data( raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ) # decode triggers and", "helpful information to console print( 'DEVICE INFO:' f'\\nChannels loaded: {channels}. 
\\n' f'Using channel:", "parser = argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel', default=6, type=int, help='channel Index to compute PSD')", "+ .1 # reshape the data x, y, num_seq, _ = trial_reshaper( trigger_targetness,", "not to export relative PSD output reverse: whether the level estimations should be", "= ('theta_sub_1', [3.00, 5.00]) DELTA = ('delta', [1, 3.99]) DELTA_SUB_1 = ('delta_sub_1', [3.20,", "reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports. Loops through segmented data and calculates the PSD", "to BciPy data directory of interest.') # parse and define the command line", "writer = csv.writer( feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write headers writer.writerow( ['', 'Average',", "from bcipy.helpers.load import load_experimental_data from bcipy.helpers.triggers import trigger_decoder from bcipy.helpers.acquisition import ( analysis_channels,", "path to a BciPy data folder with raw data and triggers channel_index: channel", "or not to write output to csv returns: average, standard deviation \"\"\" #", "np.max(stats_data, axis=0) ] if reverse: QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES) del exports[name]['data'] #", "to the list to use PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA, THETA_SUB_1,", "output reverse: whether the level estimations should be descending (default; ie band increases", "help='Path to BciPy data directory of interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda x: (str(x).lower()", "descending (default; ie band increases with attention) or ascending export_to_csv: whether or not", "get a channel map _, trigger_targetness, trigger_timing, offset = trigger_decoder( mode=MODE, trigger_path=trigger_path) #", "supply the path in the console if not data_folder: data_folder = load_experimental_data() channel_index", "as plt 
import seaborn as sns import warnings from bcipy.helpers.load import read_data_csv from", "'DEVICE INFO:' f'\\nChannels loaded: {channels}. \\n' f'Using channel: {channels[channel_index]} \\n' f'Using Device: {type_amp}", "the data filtered_data, sampling_rate_post_filter = filter_data( raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ) # decode triggers", "parser.add_argument('-export', '--export', required=False, default=False, type=str, help='Path to BciPy data directory of interest.') #", "fs = read_data_csv(raw_data_path) # print helpful information to console print( 'DEVICE INFO:' f'\\nChannels", "from bcipy.signal.process.decomposition.psd import ( power_spectral_density, PSD_TYPE) # BciPy Constants # [TODO] We can", "= 'feedback_exports.csv' # Parameters TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE = 2", "to BciPy data directory of interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda x: (str(x).lower() ==", "(str(x).lower() == 'true'), help='Whether or not to plot raw data and PSD') parser.add_argument('-relative',", "attention. ' \\ 'Use this flag to reverse that direction. ' \\ 'Used", "want the PSD from the first stimuli in trial to the trial_length \"\"\"", "f'Quantiles {QUANTILES}']) # write PSD data for name, _ in PSD_TO_DETERMINE: writer.writerow( [name,", "bcipy.helpers.task import trial_reshaper from bcipy.helpers.load import load_experimental_data from bcipy.helpers.triggers import trigger_decoder from bcipy.helpers.acquisition", "calculation plot: whether or not to plot the filtered data and psd spectrum", "returns: average, standard deviation \"\"\" # construct the relevant data paths trigger_path =", "of 100 ms [TODO load from parameters] offset = offset + .1 #", "_ in range(frames): process_data = data[channel_index][index] tmp.append(process_data) index += step for name, band", "PSD output reverse: whether the level estimations should be descending (default; ie band", "Data. 
Using the same procedure as AD supplement, filter and downsample the data", "[] # Calculate PSD for every sequence (called frame here) for _ in", "k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data = create_sequence_exports( x, num_seq * 10, channel_index,", "step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports. Loops through segmented data and calculates the PSD sequence", "num_trials, channel_index, trial_length, sampling_rate, plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports. Loops through", "to use for PSD calculation plot: whether or not to plot the filtered", "4)) plt.plot(time, data, lw=1.5, color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw Data Plot')", "exports[name] = {} exports[name]['data'] = [] def psd_explore( data_folder, channel_index, plot=True, relative=False, reverse=False,", "quoting=csv.QUOTE_MINIMAL) # write headers writer.writerow( ['', 'Average', 'Standard Deviation', 'Range [min max]', f'Quantiles", "f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path: [{trigger_path}] \\n' f'Raw Data Path: [{raw_data_path}] \\n')", "fs, downsample_rate, notch_filter_freqency): \"\"\"Filter Data. 
Using the same procedure as AD supplement, filter", "process and get the data from csv raw_data, _, channels, type_amp, fs =", "y, num_seq, _ = trial_reshaper( trigger_targetness, trigger_timing, filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels,", "name, band in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative))", "fields of interest for export for name, band in PSD_TO_DETERMINE: stats_data = np.array(exports[name]['data'])", "= np.std(stats_data, axis=0) exports[name]['range'] = [ np.min(stats_data, axis=0), np.max(stats_data, axis=0) ] if reverse:", "raw_data, fs, notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP, fs, order=2) filtered_data =", "np.min(stats_data, axis=0), np.max(stats_data, axis=0) ] if reverse: QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES) del", "doesn't work on Mac for some reason... 
supply the path in the console", "\"\"\" index = 0 frames = int(num_trials / step) tmp = [] #", "in PSD_TO_DETERMINE: stats_data = np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data, axis=0) exports[name]['stdev'] = np.std(stats_data, axis=0)", "exports[name]['data'].append( power_spectral_density( process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative)) # calculate the fields", "offset + .1 # reshape the data x, y, num_seq, _ = trial_reshaper(", "or not to plot the filtered data and psd spectrum relative: whether or", "or not to plot the data for exploration relative: whether this is a", "necessary command line arguments parser = argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel', default=6, type=int, help='channel", "use PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA, THETA_SUB_1, DELTA] # Initialize exports", "import load_experimental_data from bcipy.helpers.triggers import trigger_decoder from bcipy.helpers.acquisition import ( analysis_channels, analysis_channel_names_by_pos) from", "to write output to csv returns: average, standard deviation \"\"\" # construct the", "trial_reshaper( trigger_targetness, trigger_timing, filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data =", "2 FILTER_LP = 40 # Quantile Exports QUANTILES = [15, 30, 45, 70]", "the AD Feedback experiment. 
data_folder: path to a BciPy data folder with raw", "downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter = fs / downsample_rate return filtered_data, sampling_rate_post_filter def export_data_to_csv(exports):", "= int(num_trials / step) tmp = [] # Calculate PSD for every sequence", "interest for export for name, band in PSD_TO_DETERMINE: stats_data = np.array(exports[name]['data']) exports[name]['average'] =", "to csv returns: average, standard deviation \"\"\" # construct the relevant data paths", "print( 'DEVICE INFO:' f'\\nChannels loaded: {channels}. \\n' f'Using channel: {channels[channel_index]} \\n' f'Using Device:", "construct the relevant data paths trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}' # print", "= plt.subplots(1, 1, figsize=(12, 4)) plt.plot(time, data, lw=1.5, color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(),", "'true'), help='Whether or not to plot raw data and PSD') parser.add_argument('-relative', '--relative', default=False,", "(ie 50, 100) channel_index: channel we're interested in extracting trial_length: length of reshaping", "directory of interest.') # parse and define the command line arguments. 
args =", "{type_amp} - {fs} samples/sec \\n') # filter the data filtered_data, sampling_rate_post_filter = filter_data(", "if __name__ == '__main__': import argparse # Define necessary command line arguments parser", "max]', f'Quantiles {QUANTILES}']) # write PSD data for name, _ in PSD_TO_DETERMINE: writer.writerow(", "notch_filterted_data, FILTER_HP, FILTER_LP, fs, order=2) filtered_data = downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter = fs", "from bcipy.helpers.task import trial_reshaper from bcipy.helpers.load import load_experimental_data from bcipy.helpers.triggers import trigger_decoder from", "step for name, band in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH,", "[ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA, THETA_SUB_1, DELTA] # Initialize exports exports = {}", "[name, exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] ) if __name__ == '__main__': import argparse #", "export_to_csv=False): \"\"\"PSD Explore. 
This assumes use with VR300 for the AD Feedback experiment.", "data, lw=1.5, color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw Data Plot') sns.set(font_scale=1.2) sns.despine()", "data: reshaped trial data ['first', 'second'] num_trials: total number of sequences in task", "args.feedback_desc export_to_csv = args.export # ignore some pandas warnings, run the psd explore", "index given if plot: time = np.arange( data.size) / sampling_rate_post_filter fig, ax =", "a channel map _, trigger_targetness, trigger_timing, offset = trigger_decoder( mode=MODE, trigger_path=trigger_path) # add", "channel we're interested in extracting trial_length: length of reshaping sampling_rate: data sampling rate", "sampling_rate_post_filter fig, ax = plt.subplots(1, 1, figsize=(12, 4)) plt.plot(time, data, lw=1.5, color='k') plt.xlabel('Time", "x: (str(x).lower() == 'true'), help='Whether or not to plot raw data and PSD')", "PSD_TO_DETERMINE: stats_data = np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data, axis=0) exports[name]['stdev'] = np.std(stats_data, axis=0) exports[name]['range']", "reverse=False, export_to_csv=False): \"\"\"PSD Explore. 
This assumes use with VR300 for the AD Feedback", "total number of sequences in task (ie 50, 100) channel_index: channel we're interested", "and get a channel map _, trigger_targetness, trigger_timing, offset = trigger_decoder( mode=MODE, trigger_path=trigger_path)", "the first stimuli in trial to the trial_length \"\"\" index = 0 frames", "'true'), help='Whether or not to use relative band calculation for PSD') parser.add_argument('-path', '--path',", "directory of interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda x: (str(x).lower() == 'true'), help='By default,", "channel_index: channel to use for PSD calculation plot: whether or not to plot", "sns.despine() plt.show() if export_to_csv: export_data_to_csv(exports) return exports def create_sequence_exports( data, num_trials, channel_index, trial_length,", "exports[name]['quantiles']] ) if __name__ == '__main__': import argparse # Define necessary command line", "channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data = create_sequence_exports( x, num_seq * 10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter,", "7.99]) THETA_SUB_1 = ('theta_sub_1', [3.00, 5.00]) DELTA = ('delta', [1, 3.99]) DELTA_SUB_1 =", "extracting trial_length: length of reshaping sampling_rate: data sampling rate of EEG plot: whether", "use relative band calculation for PSD') parser.add_argument('-path', '--path', default=False, type=str, help='Path to BciPy", "'__main__': import argparse # Define necessary command line arguments parser = argparse.ArgumentParser(description='Explore PSD.')", "import warnings from bcipy.helpers.load import read_data_csv from bcipy.signal.process.filter import bandpass, notch, downsample from", "with attention) or ascending step: how many stimuli between each trial [TODO: this", "10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot, relative, reverse) # plot raw data for the", "in task (ie 50, 100) channel_index: channel 
we're interested in extracting trial_length: length", "method=PSD_TYPE.WELCH, plot=False, relative=relative)) # calculate the fields of interest for export for name,", "3.99]) DELTA_SUB_1 = ('delta_sub_1', [3.20, 4.00]) # append desired psd defined above to", "(seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw Data Plot') sns.set(font_scale=1.2) sns.despine() plt.show() if export_to_csv: export_data_to_csv(exports)", "Using the same procedure as AD supplement, filter and downsample the data for", "'second'] num_trials: total number of sequences in task (ie 50, 100) channel_index: channel", "given if plot: time = np.arange( data.size) / sampling_rate_post_filter fig, ax = plt.subplots(1,", "of PSD reverse: whether the level estimations should be descending (default; ie band", "default=False, type=lambda x: (str(x).lower() == 'true'), help='By default, PSD are assumed desceding in", "if not data_folder: data_folder = load_experimental_data() channel_index = args.channel plot = args.plot relative_calculation", "- {fs} samples/sec \\n') # filter the data filtered_data, sampling_rate_post_filter = filter_data( raw_data,", "== 'true'), help='By default, PSD are assumed desceding in ' \\ 'nature; ie", "PSD from the first stimuli in trial to the trial_length \"\"\" index =", "band increases with attention) or ascending step: how many stimuli between each trial", "for futher processing. Return: Filtered data & sampling rate \"\"\" notch_filterted_data = notch.notch_filter(", "relative: whether or not to export relative PSD output reverse: whether the level", "psd_explore( data_folder, channel_index, plot=True, relative=False, reverse=False, export_to_csv=False): \"\"\"PSD Explore. This assumes use with", "data ['first', 'second'] num_trials: total number of sequences in task (ie 50, 100)", "downsample the data for futher processing. 
Return: Filtered data & sampling rate \"\"\"", "in PSD_TO_DETERMINE: writer.writerow( [name, exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] ) if __name__ == '__main__':", "QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES) del exports[name]['data'] # calculate a raw data average", "plot: time = np.arange( data.size) / sampling_rate_post_filter fig, ax = plt.subplots(1, 1, figsize=(12,", "or not to plot raw data and PSD') parser.add_argument('-relative', '--relative', default=False, type=lambda x:", "define the command line arguments. args = parser.parse_args() data_folder = args.path # Note:", "('delta', [1, 3.99]) DELTA_SUB_1 = ('delta_sub_1', [3.20, 4.00]) # append desired psd defined", "plot: whether or not to plot the filtered data and psd spectrum relative:", "files MODE = 'calibration' TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv'", "[11.5, 12.5]) BETA = ('beta', [12, 25]) THETA = ('theta', [4, 7.99]) THETA_SUB_1", "TRIAL_LENGTH, sampling_rate_post_filter, plot, relative, reverse) # plot raw data for the trial index", "paths trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}' # print helpful information to console", "data paths trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}' # print helpful information to", "Index to compute PSD') parser.add_argument('-plot', '--plot', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether", "quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write headers writer.writerow( ['', 'Average', 'Standard Deviation', 'Range [min max]',", "parser.add_argument('-relative', '--relative', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not to use", "relative=relative)) # calculate the fields of interest for export for name, band in", "time = np.arange( data.size) / 
sampling_rate_post_filter fig, ax = plt.subplots(1, 1, figsize=(12, 4))", "the trial_length \"\"\" index = 0 frames = int(num_trials / step) tmp =", "/ step) tmp = [] # Calculate PSD for every sequence (called frame", "data[channel_index][index] tmp.append(process_data) index += step for name, band in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data,", "\"\"\"Filter Data. Using the same procedure as AD supplement, filter and downsample the", "session parameter files MODE = 'calibration' TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME", "[TODO: this could be taken from parameters from the session] * we want", "be taken from parameters from the session] * we want the PSD from", "FILTER_LP = 40 # Quantile Exports QUANTILES = [15, 30, 45, 70] #", "= trial_reshaper( trigger_targetness, trigger_timing, filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data", "default=False, type=str, help='Path to BciPy data directory of interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda", "\\ 'nature; ie PSD increases with attention. ' \\ 'Use this flag to", "ascending step: how many stimuli between each trial [TODO: this could be taken", "line arguments. 
args = parser.parse_args() data_folder = args.path # Note: this doesn't work", "how many stimuli between each trial [TODO: this could be taken from parameters", "read_data_csv from bcipy.signal.process.filter import bandpass, notch, downsample from bcipy.helpers.task import trial_reshaper from bcipy.helpers.load", "PSD_TO_DETERMINE: writer.writerow( [name, exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] ) if __name__ == '__main__': import", "raw_data, _, channels, type_amp, fs = read_data_csv(raw_data_path) # print helpful information to console", "or ascending export_to_csv: whether or not to write output to csv returns: average,", "np.mean(stats_data, axis=0) exports[name]['stdev'] = np.std(stats_data, axis=0) exports[name]['range'] = [ np.min(stats_data, axis=0), np.max(stats_data, axis=0)", "num_seq * 10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot, relative, reverse) # plot raw data", "and add to PSD_TO_DETERMINE list.\"\"\" ALPHA = ('alpha', [8, 11.99]) ALPHA_SUB_1 = ('alpha_sub_1',", "BciPy data folder with raw data and triggers channel_index: channel to use for", "('beta', [12, 25]) THETA = ('theta', [4, 7.99]) THETA_SUB_1 = ('theta_sub_1', [3.00, 5.00])", "\\n' f'Notch Frequency: {NOTCH_FREQ} \\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path: [{trigger_path}] \\n'", "samples/sec \\n') # filter the data filtered_data, sampling_rate_post_filter = filter_data( raw_data, fs, DOWNSAMPLE_RATE,", "interest.') # parse and define the command line arguments. 
args = parser.parse_args() data_folder", "reshaping sampling_rate: data sampling rate of EEG plot: whether or not to plot", "lw=1.5, color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw Data Plot') sns.set(font_scale=1.2) sns.despine() plt.show()", "\\n' f'Raw Data Path: [{raw_data_path}] \\n') # process and get the data from", "plot the filtered data and psd spectrum relative: whether or not to export", "exports. Loops through segmented data and calculates the PSD sequence data. data: reshaped", "axis=0) if plot: power_spectral_density( average, [1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative) return", "of EEG plot: whether or not to plot the data for exploration relative:", "here) for _ in range(frames): process_data = data[channel_index][index] tmp.append(process_data) index += step for", "plt.show() if export_to_csv: export_data_to_csv(exports) return exports def create_sequence_exports( data, num_trials, channel_index, trial_length, sampling_rate,", "step: how many stimuli between each trial [TODO: this could be taken from", "export_data_to_csv(exports) return exports def create_sequence_exports( data, num_trials, channel_index, trial_length, sampling_rate, plot, relative, reverse,", "args = parser.parse_args() data_folder = args.path # Note: this doesn't work on Mac", "a raw data average for plotting purposes only average = np.mean(np.array(tmp), axis=0) if", "the trial index given if plot: time = np.arange( data.size) / sampling_rate_post_filter fig,", "index = 0 frames = int(num_trials / step) tmp = [] # Calculate", "function and print results with warnings.catch_warnings(): warnings.simplefilter('ignore') # explore! 
psd = psd_explore( data_folder,", "console print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH} \\n' f'Downsample rate: {DOWNSAMPLE_RATE} \\n' f'Notch Frequency: {NOTCH_FREQ}", "exports[name]['range'] = [ np.min(stats_data, axis=0), np.max(stats_data, axis=0) ] if reverse: QUANTILES.reverse() exports[name]['quantiles'] =", "f'Using channel: {channels[channel_index]} \\n' f'Using Device: {type_amp} - {fs} samples/sec \\n') # filter", "= 'triggers.txt' RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv' # Parameters TRIAL_LENGTH = 2.5", "warnings, run the psd explore function and print results with warnings.catch_warnings(): warnings.simplefilter('ignore') #", "def create_sequence_exports( data, num_trials, channel_index, trial_length, sampling_rate, plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence", "exports[name]['stdev'] = np.std(stats_data, axis=0) exports[name]['range'] = [ np.min(stats_data, axis=0), np.max(stats_data, axis=0) ] if", "required=False, default=False, type=str, help='Path to BciPy data directory of interest.') # parse and", "= {} exports[name]['data'] = [] def psd_explore( data_folder, channel_index, plot=True, relative=False, reverse=False, export_to_csv=False):", "# Initialize exports exports = {} for name, band in PSD_TO_DETERMINE: exports[name] =", "with attention. ' \\ 'Use this flag to reverse that direction. 
' \\", "trial_length: length of reshaping sampling_rate: data sampling rate of EEG plot: whether or", "calculate the fields of interest for export for name, band in PSD_TO_DETERMINE: stats_data", "read_data_csv(raw_data_path) # print helpful information to console print( 'DEVICE INFO:' f'\\nChannels loaded: {channels}.", "import argparse # Define necessary command line arguments parser = argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel',", "QUANTILES = [15, 30, 45, 70] # PSD Parameters \"\"\"Define bands here and", "cutoffs for feedback levels ') parser.add_argument('-export', '--export', required=False, default=False, type=str, help='Path to BciPy", "feedback levels ') parser.add_argument('-export', '--export', required=False, default=False, type=str, help='Path to BciPy data directory", "write headers writer.writerow( ['', 'Average', 'Standard Deviation', 'Range [min max]', f'Quantiles {QUANTILES}']) #", "of reshaping sampling_rate: data sampling rate of EEG plot: whether or not to", "raw data and triggers channel_index: channel to use for PSD calculation plot: whether", "write output to csv returns: average, standard deviation \"\"\" # construct the relevant", "trial [TODO: this could be taken from parameters from the session] * we", "bands here and add to PSD_TO_DETERMINE list.\"\"\" ALPHA = ('alpha', [8, 11.99]) ALPHA_SUB_1", "help='Whether or not to plot raw data and PSD') parser.add_argument('-relative', '--relative', default=False, type=lambda", "export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w') as feedback_file: writer = csv.writer( feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)", "10 DOWNSAMPLE_RATE = 2 NOTCH_FREQ = 60 FILTER_HP = 2 FILTER_LP = 40", "if export_to_csv: export_data_to_csv(exports) return exports def create_sequence_exports( data, num_trials, channel_index, trial_length, sampling_rate, plot,", "for name, band in PSD_TO_DETERMINE: exports[name] = {} exports[name]['data'] = 
[] def psd_explore(", "this could be taken from parameters from the session] * we want the", "from csv raw_data, _, channels, type_amp, fs = read_data_csv(raw_data_path) # print helpful information", "add to PSD_TO_DETERMINE list.\"\"\" ALPHA = ('alpha', [8, 11.99]) ALPHA_SUB_1 = ('alpha_sub_1', [7.00,", "type_amp), trial_length=TRIAL_LENGTH) data = create_sequence_exports( x, num_seq * 10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot,", "or absolute calculation of PSD reverse: whether the level estimations should be descending", "\"\"\" notch_filterted_data = notch.notch_filter( raw_data, fs, notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP,", "the session] * we want the PSD from the first stimuli in trial", "BciPy Constants # [TODO] We can load some of these from the session", "argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel', default=6, type=int, help='channel Index to compute PSD') parser.add_argument('-plot', '--plot',", "'triggers.txt' RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv' # Parameters TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE", "Initialize exports exports = {} for name, band in PSD_TO_DETERMINE: exports[name] = {}", "{channels[channel_index]} \\n' f'Using Device: {type_amp} - {fs} samples/sec \\n') # filter the data", "PSD calculation plot: whether or not to plot the filtered data and psd", "ms [TODO load from parameters] offset = offset + .1 # reshape the", "parse and define the command line arguments. args = parser.parse_args() data_folder = args.path", "some pandas warnings, run the psd explore function and print results with warnings.catch_warnings():", "from bcipy.signal.process.filter import bandpass, notch, downsample from bcipy.helpers.task import trial_reshaper from bcipy.helpers.load import", "use with VR300 for the AD Feedback experiment. 
data_folder: path to a BciPy", "band calculation for PSD') parser.add_argument('-path', '--path', default=False, type=str, help='Path to BciPy data directory", "'--channel', default=6, type=int, help='channel Index to compute PSD') parser.add_argument('-plot', '--plot', default=False, type=lambda x:", "'--path', default=False, type=str, help='Path to BciPy data directory of interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False,", "= {} for name, band in PSD_TO_DETERMINE: exports[name] = {} exports[name]['data'] = []", "for _ in range(frames): process_data = data[channel_index][index] tmp.append(process_data) index += step for name,", "# print helpful information to console print( 'DEVICE INFO:' f'\\nChannels loaded: {channels}. \\n'", "reverse: QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES) del exports[name]['data'] # calculate a raw data", "axis=0) ] if reverse: QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES) del exports[name]['data'] # calculate", "sampling_rate, plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports. Loops through segmented data and", "average def filter_data(raw_data, fs, downsample_rate, notch_filter_freqency): \"\"\"Filter Data. Using the same procedure as", "'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv' # Parameters TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE", "psd explore function and print results with warnings.catch_warnings(): warnings.simplefilter('ignore') # explore! psd =", "DOWNSAMPLE_RATE, NOTCH_FREQ) # decode triggers and get a channel map _, trigger_targetness, trigger_timing,", "= offset + .1 # reshape the data x, y, num_seq, _ =", "in ' \\ 'nature; ie PSD increases with attention. 
' \\ 'Use this", "\\n' f'Downsample rate: {DOWNSAMPLE_RATE} \\n' f'Notch Frequency: {NOTCH_FREQ} \\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n'", "name, _ in PSD_TO_DETERMINE: writer.writerow( [name, exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] ) if __name__", "plot=plot, relative=relative) return average def filter_data(raw_data, fs, downsample_rate, notch_filter_freqency): \"\"\"Filter Data. Using the", "this doesn't work on Mac for some reason... supply the path in the", "ALPHA_SUB_2, BETA, THETA, THETA_SUB_1, DELTA] # Initialize exports exports = {} for name,", "window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative)) # calculate the fields of interest for export for", "= 'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv' # Parameters TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10", "'Standard Deviation', 'Range [min max]', f'Quantiles {QUANTILES}']) # write PSD data for name,", "channel_index = args.channel plot = args.plot relative_calculation = args.relative reverse = args.feedback_desc export_to_csv", "# BciPy Constants # [TODO] We can load some of these from the", "filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data = create_sequence_exports( x, num_seq", "downsample_rate return filtered_data, sampling_rate_post_filter def export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w') as feedback_file: writer =", "help='Path to BciPy data directory of interest.') # parse and define the command", "# plot raw data for the trial index given if plot: time =", "relative=relative) return average def filter_data(raw_data, fs, downsample_rate, notch_filter_freqency): \"\"\"Filter Data. 
Using the same", "Parameters \"\"\"Define bands here and add to PSD_TO_DETERMINE list.\"\"\" ALPHA = ('alpha', [8,", "band in PSD_TO_DETERMINE: exports[name] = {} exports[name]['data'] = [] def psd_explore( data_folder, channel_index,", "INFO:' f'\\nChannels loaded: {channels}. \\n' f'Using channel: {channels[channel_index]} \\n' f'Using Device: {type_amp} -", "export_to_csv = args.export # ignore some pandas warnings, run the psd explore function", "loaded: {channels}. \\n' f'Using channel: {channels[channel_index]} \\n' f'Using Device: {type_amp} - {fs} samples/sec", "== 'true'), help='Whether or not to plot raw data and PSD') parser.add_argument('-relative', '--relative',", "interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda x: (str(x).lower() == 'true'), help='By default, PSD are", "= [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA, THETA_SUB_1, DELTA] # Initialize exports exports =", "stimuli between each trial [TODO: this could be taken from parameters from the", "# Calculate PSD for every sequence (called frame here) for _ in range(frames):", "Loops through segmented data and calculates the PSD sequence data. 
data: reshaped trial", "fs / downsample_rate return filtered_data, sampling_rate_post_filter def export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w') as feedback_file:", "== '__main__': import argparse # Define necessary command line arguments parser = argparse.ArgumentParser(description='Explore", "= [15, 30, 45, 70] # PSD Parameters \"\"\"Define bands here and add", "psd defined above to the list to use PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2,", "level estimations should be descending (default; ie band increases with attention) or ascending", "color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw Data Plot') sns.set(font_scale=1.2) sns.despine() plt.show() if", "plt.plot(time, data, lw=1.5, color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw Data Plot') sns.set(font_scale=1.2)", "trigger_targetness, trigger_timing, filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data = create_sequence_exports(", "stats_data = np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data, axis=0) exports[name]['stdev'] = np.std(stats_data, axis=0) exports[name]['range'] =", "analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import ( power_spectral_density, PSD_TYPE) # BciPy Constants # [TODO] We", "0 frames = int(num_trials / step) tmp = [] # Calculate PSD for", "filtered_data = downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter = fs / downsample_rate return filtered_data, sampling_rate_post_filter", "data_folder: path to a BciPy data folder with raw data and triggers channel_index:", "and calculates the PSD sequence data. 
data: reshaped trial data ['first', 'second'] num_trials:", "writer.writerow( [name, exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] ) if __name__ == '__main__': import argparse", "__name__ == '__main__': import argparse # Define necessary command line arguments parser =", "# reshape the data x, y, num_seq, _ = trial_reshaper( trigger_targetness, trigger_timing, filtered_data,", "and PSD') parser.add_argument('-relative', '--relative', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not", "from bcipy.helpers.load import read_data_csv from bcipy.signal.process.filter import bandpass, notch, downsample from bcipy.helpers.task import", "PSD') parser.add_argument('-plot', '--plot', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not to", "import numpy as np import matplotlib.pyplot as plt import seaborn as sns import", "figsize=(12, 4)) plt.plot(time, data, lw=1.5, color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw Data", "load_experimental_data from bcipy.helpers.triggers import trigger_decoder from bcipy.helpers.acquisition import ( analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd", "with open(CSV_EXPORT_NAME, 'w') as feedback_file: writer = csv.writer( feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) #", "{fs} samples/sec \\n') # filter the data filtered_data, sampling_rate_post_filter = filter_data( raw_data, fs,", "run the psd explore function and print results with warnings.catch_warnings(): warnings.simplefilter('ignore') # explore!", "a static offset of 100 ms [TODO load from parameters] offset = offset", "plotting purposes only average = np.mean(np.array(tmp), axis=0) if plot: power_spectral_density( average, [1, 2],", "Plot') sns.set(font_scale=1.2) sns.despine() plt.show() if export_to_csv: export_data_to_csv(exports) return 
exports def create_sequence_exports( data, num_trials,", "import matplotlib.pyplot as plt import seaborn as sns import warnings from bcipy.helpers.load import", "[15, 30, 45, 70] # PSD Parameters \"\"\"Define bands here and add to", "'feedback_exports.csv' # Parameters TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE = 2 NOTCH_FREQ", "the filtered data and psd spectrum relative: whether or not to export relative", "whether or not to plot the data for exploration relative: whether this is", "in range(frames): process_data = data[channel_index][index] tmp.append(process_data) index += step for name, band in", "plot: power_spectral_density( average, [1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative) return average def", "QUANTILES) del exports[name]['data'] # calculate a raw data average for plotting purposes only", "default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not to plot raw data", "'Use this flag to reverse that direction. 
' \\ 'Used to calculate appropriate", "[{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path: [{trigger_path}] \\n' f'Raw Data Path: [{raw_data_path}] \\n') # process", "FILTER_LP, fs, order=2) filtered_data = downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter = fs / downsample_rate", "parameter files MODE = 'calibration' TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME =", "whether or not to plot the filtered data and psd spectrum relative: whether", "create_sequence_exports( x, num_seq * 10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot, relative, reverse) # plot", "parser.add_argument('-channel', '--channel', default=6, type=int, help='channel Index to compute PSD') parser.add_argument('-plot', '--plot', default=False, type=lambda", "relative_calculation = args.relative reverse = args.feedback_desc export_to_csv = args.export # ignore some pandas", "exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] ) if __name__ == '__main__': import argparse # Define necessary", "to console print( 'DEVICE INFO:' f'\\nChannels loaded: {channels}. \\n' f'Using channel: {channels[channel_index]} \\n'", "PSD sequence data. data: reshaped trial data ['first', 'second'] num_trials: total number of", "= 10 DOWNSAMPLE_RATE = 2 NOTCH_FREQ = 60 FILTER_HP = 2 FILTER_LP =", "load from parameters] offset = offset + .1 # reshape the data x,", "offset = offset + .1 # reshape the data x, y, num_seq, _", "2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative) return average def filter_data(raw_data, fs, downsample_rate, notch_filter_freqency):", "notch.notch_filter( raw_data, fs, notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP, fs, order=2) filtered_data", "direction. 
' \\ 'Used to calculate appropriate cutoffs for feedback levels ') parser.add_argument('-export',", "= [] # Calculate PSD for every sequence (called frame here) for _", "np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data, axis=0) exports[name]['stdev'] = np.std(stats_data, axis=0) exports[name]['range'] = [ np.min(stats_data,", "session] * we want the PSD from the first stimuli in trial to", "between each trial [TODO: this could be taken from parameters from the session]", "if plot: time = np.arange( data.size) / sampling_rate_post_filter fig, ax = plt.subplots(1, 1,", "triggers channel_index: channel to use for PSD calculation plot: whether or not to", "= f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}' # print helpful information to console print('CONFIGURATION:\\n' f'Trial", "Quantile Exports QUANTILES = [15, 30, 45, 70] # PSD Parameters \"\"\"Define bands", "data.size) / sampling_rate_post_filter fig, ax = plt.subplots(1, 1, figsize=(12, 4)) plt.plot(time, data, lw=1.5,", "trial data ['first', 'second'] num_trials: total number of sequences in task (ie 50,", "def filter_data(raw_data, fs, downsample_rate, notch_filter_freqency): \"\"\"Filter Data. Using the same procedure as AD", "This assumes use with VR300 for the AD Feedback experiment. 
data_folder: path to", "use for PSD calculation plot: whether or not to plot the filtered data", "\\n' f'Using channel: {channels[channel_index]} \\n' f'Using Device: {type_amp} - {fs} samples/sec \\n') #", "list to use PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA, THETA_SUB_1, DELTA] #", "THETA_SUB_1 = ('theta_sub_1', [3.00, 5.00]) DELTA = ('delta', [1, 3.99]) DELTA_SUB_1 = ('delta_sub_1',", "f'Trial length: {TRIAL_LENGTH} \\n' f'Downsample rate: {DOWNSAMPLE_RATE} \\n' f'Notch Frequency: {NOTCH_FREQ} \\n' f'Bandpass", "rate of EEG plot: whether or not to plot the data for exploration", "ax = plt.subplots(1, 1, figsize=(12, 4)) plt.plot(time, data, lw=1.5, color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage')", "purposes only average = np.mean(np.array(tmp), axis=0) if plot: power_spectral_density( average, [1, 2], sampling_rate=sampling_rate,", "fig, ax = plt.subplots(1, 1, figsize=(12, 4)) plt.plot(time, data, lw=1.5, color='k') plt.xlabel('Time (seconds)')", "import seaborn as sns import warnings from bcipy.helpers.load import read_data_csv from bcipy.signal.process.filter import", "('alpha', [8, 11.99]) ALPHA_SUB_1 = ('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2 = ('alpha_sub_2', [11.5, 12.5])", "np.percentile(stats_data, QUANTILES) del exports[name]['data'] # calculate a raw data average for plotting purposes", "default, PSD are assumed desceding in ' \\ 'nature; ie PSD increases with", "plt.xlim([time.min(), time.max()]) plt.title('Raw Data Plot') sns.set(font_scale=1.2) sns.despine() plt.show() if export_to_csv: export_data_to_csv(exports) return exports", "['first', 'second'] num_trials: total number of sequences in task (ie 50, 100) channel_index:", "Return: Filtered data & sampling rate \"\"\" notch_filterted_data = notch.notch_filter( raw_data, fs, notch_filter_freqency)", "FILTER_HP = 2 FILTER_LP = 40 # Quantile Exports QUANTILES = [15, 30,", "RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv' # Parameters 
TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE =", "calculate appropriate cutoffs for feedback levels ') parser.add_argument('-export', '--export', required=False, default=False, type=str, help='Path", "is a relative or absolute calculation of PSD reverse: whether the level estimations", "that direction. ' \\ 'Used to calculate appropriate cutoffs for feedback levels ')", "of sequences in task (ie 50, 100) channel_index: channel we're interested in extracting", "export_to_csv: export_data_to_csv(exports) return exports def create_sequence_exports( data, num_trials, channel_index, trial_length, sampling_rate, plot, relative,", "descending (default; ie band increases with attention) or ascending step: how many stimuli", "exports[name]['data'] = [] def psd_explore( data_folder, channel_index, plot=True, relative=False, reverse=False, export_to_csv=False): \"\"\"PSD Explore.", "# print helpful information to console print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH} \\n' f'Downsample rate:", "4.00]) # append desired psd defined above to the list to use PSD_TO_DETERMINE", "sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative)) # calculate the fields of interest for export", "[min max]', f'Quantiles {QUANTILES}']) # write PSD data for name, _ in PSD_TO_DETERMINE:", "= trigger_decoder( mode=MODE, trigger_path=trigger_path) # add a static offset of 100 ms [TODO", "exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] ) if __name__ == '__main__': import argparse # Define", "increases with attention) or ascending export_to_csv: whether or not to write output to", "PSD increases with attention. ' \\ 'Use this flag to reverse that direction.", "\"\"\"Define bands here and add to PSD_TO_DETERMINE list.\"\"\" ALPHA = ('alpha', [8, 11.99])", "Feedback experiment. 
data_folder: path to a BciPy data folder with raw data and", "NOTCH_FREQ) # decode triggers and get a channel map _, trigger_targetness, trigger_timing, offset", "in trial to the trial_length \"\"\" index = 0 frames = int(num_trials /", "type=lambda x: (str(x).lower() == 'true'), help='By default, PSD are assumed desceding in '", "output to csv returns: average, standard deviation \"\"\" # construct the relevant data", "from parameters] offset = offset + .1 # reshape the data x, y,", "argparse # Define necessary command line arguments parser = argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel',", "parameters from the session] * we want the PSD from the first stimuli", "frames = int(num_trials / step) tmp = [] # Calculate PSD for every", "information to console print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH} \\n' f'Downsample rate: {DOWNSAMPLE_RATE} \\n' f'Notch", "{TRIAL_LENGTH} \\n' f'Downsample rate: {DOWNSAMPLE_RATE} \\n' f'Notch Frequency: {NOTCH_FREQ} \\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}]", "5.00]) DELTA = ('delta', [1, 3.99]) DELTA_SUB_1 = ('delta_sub_1', [3.20, 4.00]) # append", "console print( 'DEVICE INFO:' f'\\nChannels loaded: {channels}. \\n' f'Using channel: {channels[channel_index]} \\n' f'Using", "= args.path # Note: this doesn't work on Mac for some reason... 
supply", "['', 'Average', 'Standard Deviation', 'Range [min max]', f'Quantiles {QUANTILES}']) # write PSD data", "[{raw_data_path}] \\n') # process and get the data from csv raw_data, _, channels,", "fs, DOWNSAMPLE_RATE, NOTCH_FREQ) # decode triggers and get a channel map _, trigger_targetness,", "( power_spectral_density, PSD_TYPE) # BciPy Constants # [TODO] We can load some of", "mode=MODE, trigger_path=trigger_path) # add a static offset of 100 ms [TODO load from", "= 0 frames = int(num_trials / step) tmp = [] # Calculate PSD", "type=lambda x: (str(x).lower() == 'true'), help='Whether or not to use relative band calculation", "sampling_rate_post_filter = filter_data( raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ) # decode triggers and get a", "TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN = 'raw_data.csv' CSV_EXPORT_NAME = 'feedback_exports.csv' # Parameters TRIAL_LENGTH =", "interested in extracting trial_length: length of reshaping sampling_rate: data sampling rate of EEG", "= [ np.min(stats_data, axis=0), np.max(stats_data, axis=0) ] if reverse: QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data,", "data = create_sequence_exports( x, num_seq * 10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot, relative, reverse)", "np.mean(np.array(tmp), axis=0) if plot: power_spectral_density( average, [1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative)", "channel_index, plot=True, relative=False, reverse=False, export_to_csv=False): \"\"\"PSD Explore. This assumes use with VR300 for", "type=str, help='Path to BciPy data directory of interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda x:", "plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw Data Plot') sns.set(font_scale=1.2) sns.despine() plt.show() if export_to_csv:", "some reason... 
supply the path in the console if not data_folder: data_folder =", "load_experimental_data() channel_index = args.channel plot = args.plot relative_calculation = args.relative reverse = args.feedback_desc", "axis=0) exports[name]['stdev'] = np.std(stats_data, axis=0) exports[name]['range'] = [ np.min(stats_data, axis=0), np.max(stats_data, axis=0) ]", "data and triggers channel_index: channel to use for PSD calculation plot: whether or", "DELTA = ('delta', [1, 3.99]) DELTA_SUB_1 = ('delta_sub_1', [3.20, 4.00]) # append desired", "'--export', required=False, default=False, type=str, help='Path to BciPy data directory of interest.') # parse", "above to the list to use PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA,", "through segmented data and calculates the PSD sequence data. data: reshaped trial data", "as np import matplotlib.pyplot as plt import seaborn as sns import warnings from", "to use PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA, THETA_SUB_1, DELTA] # Initialize", "\"\"\"PSD Explore. This assumes use with VR300 for the AD Feedback experiment. data_folder:", "sns.set(font_scale=1.2) sns.despine() plt.show() if export_to_csv: export_data_to_csv(exports) return exports def create_sequence_exports( data, num_trials, channel_index,", "not to plot raw data and PSD') parser.add_argument('-relative', '--relative', default=False, type=lambda x: (str(x).lower()", "from the first stimuli in trial to the trial_length \"\"\" index = 0", "warnings.catch_warnings(): warnings.simplefilter('ignore') # explore! psd = psd_explore( data_folder, channel_index, plot=plot, relative=relative_calculation, reverse=reverse, export_to_csv=export_to_csv)", "f'{data_folder}/{RAW_DATA_FN}' # print helpful information to console print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH} \\n' f'Downsample", "[4, 7.99]) THETA_SUB_1 = ('theta_sub_1', [3.00, 5.00]) DELTA = ('delta', [1, 3.99]) DELTA_SUB_1", "the data for futher processing. 
Return: Filtered data & sampling rate \"\"\" notch_filterted_data", "exports def create_sequence_exports( data, num_trials, channel_index, trial_length, sampling_rate, plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create", "sequence (called frame here) for _ in range(frames): process_data = data[channel_index][index] tmp.append(process_data) index", "DELTA] # Initialize exports exports = {} for name, band in PSD_TO_DETERMINE: exports[name]", "levels ') parser.add_argument('-export', '--export', required=False, default=False, type=str, help='Path to BciPy data directory of", "(default; ie band increases with attention) or ascending export_to_csv: whether or not to", "x, num_seq * 10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot, relative, reverse) # plot raw", "data and PSD') parser.add_argument('-relative', '--relative', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or", "\\n') # filter the data filtered_data, sampling_rate_post_filter = filter_data( raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ)", "seaborn as sns import warnings from bcipy.helpers.load import read_data_csv from bcipy.signal.process.filter import bandpass,", "calculation of PSD reverse: whether the level estimations should be descending (default; ie", "as sns import warnings from bcipy.helpers.load import read_data_csv from bcipy.signal.process.filter import bandpass, notch,", "in the console if not data_folder: data_folder = load_experimental_data() channel_index = args.channel plot", "reason... supply the path in the console if not data_folder: data_folder = load_experimental_data()", "100) channel_index: channel we're interested in extracting trial_length: length of reshaping sampling_rate: data", "many stimuli between each trial [TODO: this could be taken from parameters from", "f'\\nChannels loaded: {channels}. 
\\n' f'Using channel: {channels[channel_index]} \\n' f'Using Device: {type_amp} - {fs}", "for plotting purposes only average = np.mean(np.array(tmp), axis=0) if plot: power_spectral_density( average, [1,", "sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative) return average def filter_data(raw_data, fs, downsample_rate, notch_filter_freqency): \"\"\"Filter", "not to use relative band calculation for PSD') parser.add_argument('-path', '--path', default=False, type=str, help='Path", "# write PSD data for name, _ in PSD_TO_DETERMINE: writer.writerow( [name, exports[name]['average'], exports[name]['stdev'],", "path in the console if not data_folder: data_folder = load_experimental_data() channel_index = args.channel", "the list to use PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA, THETA_SUB_1, DELTA]", "channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot, relative, reverse) # plot raw data for the trial", "_ in PSD_TO_DETERMINE: writer.writerow( [name, exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] ) if __name__ ==", "method=PSD_TYPE.WELCH, plot=plot, relative=relative) return average def filter_data(raw_data, fs, downsample_rate, notch_filter_freqency): \"\"\"Filter Data. Using", "plt.subplots(1, 1, figsize=(12, 4)) plt.plot(time, data, lw=1.5, color='k') plt.xlabel('Time (seconds)') plt.ylabel('Voltage') plt.xlim([time.min(), time.max()])", "\\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path: [{trigger_path}] \\n' f'Raw Data Path: [{raw_data_path}]", "return filtered_data, sampling_rate_post_filter def export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w') as feedback_file: writer = csv.writer(", "'nature; ie PSD increases with attention. 
' \\ 'Use this flag to reverse", "estimations should be descending (default; ie band increases with attention) or ascending step:", "of interest for export for name, band in PSD_TO_DETERMINE: stats_data = np.array(exports[name]['data']) exports[name]['average']", "and print results with warnings.catch_warnings(): warnings.simplefilter('ignore') # explore! psd = psd_explore( data_folder, channel_index,", "= data[channel_index][index] tmp.append(process_data) index += step for name, band in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density(", "fs, order=2) filtered_data = downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter = fs / downsample_rate return", "Path: [{trigger_path}] \\n' f'Raw Data Path: [{raw_data_path}] \\n') # process and get the", "(default; ie band increases with attention) or ascending step: how many stimuli between", "trial to the trial_length \"\"\" index = 0 frames = int(num_trials / step)", "\"\"\" # construct the relevant data paths trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}'", "100 ms [TODO load from parameters] offset = offset + .1 # reshape", "desceding in ' \\ 'nature; ie PSD increases with attention. 
' \\ 'Use", "spectrum relative: whether or not to export relative PSD output reverse: whether the", ".1 # reshape the data x, y, num_seq, _ = trial_reshaper( trigger_targetness, trigger_timing,", "index += step for name, band in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data, band, sampling_rate=sampling_rate,", "= csv.writer( feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write headers writer.writerow( ['', 'Average', 'Standard", "to calculate appropriate cutoffs for feedback levels ') parser.add_argument('-export', '--export', required=False, default=False, type=str,", "= np.mean(np.array(tmp), axis=0) if plot: power_spectral_density( average, [1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot,", "mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data = create_sequence_exports( x, num_seq *", "trigger_decoder from bcipy.helpers.acquisition import ( analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import ( power_spectral_density, PSD_TYPE)", "raw data and PSD') parser.add_argument('-relative', '--relative', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether", "PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative)) # calculate the", "f'Downsample rate: {DOWNSAMPLE_RATE} \\n' f'Notch Frequency: {NOTCH_FREQ} \\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger", "( analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import ( power_spectral_density, PSD_TYPE) # BciPy Constants #", "plot raw data and PSD') parser.add_argument('-relative', '--relative', default=False, type=lambda x: (str(x).lower() == 'true'),", "plot 
the data for exploration relative: whether this is a relative or absolute", "to PSD_TO_DETERMINE list.\"\"\" ALPHA = ('alpha', [8, 11.99]) ALPHA_SUB_1 = ('alpha_sub_1', [7.00, 9.00])", "(called frame here) for _ in range(frames): process_data = data[channel_index][index] tmp.append(process_data) index +=", "command line arguments. args = parser.parse_args() data_folder = args.path # Note: this doesn't", "args.path # Note: this doesn't work on Mac for some reason... supply the", "offset of 100 ms [TODO load from parameters] offset = offset + .1", "11.99]) ALPHA_SUB_1 = ('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2 = ('alpha_sub_2', [11.5, 12.5]) BETA =", "from the session parameter files MODE = 'calibration' TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN =", "[{trigger_path}] \\n' f'Raw Data Path: [{raw_data_path}] \\n') # process and get the data", "BETA = ('beta', [12, 25]) THETA = ('theta', [4, 7.99]) THETA_SUB_1 = ('theta_sub_1',", "= load_experimental_data() channel_index = args.channel plot = args.plot relative_calculation = args.relative reverse =", "to plot the data for exploration relative: whether this is a relative or", "whether the level estimations should be descending (default; ie band increases with attention)", "filter the data filtered_data, sampling_rate_post_filter = filter_data( raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ) # decode", "reshaped trial data ['first', 'second'] num_trials: total number of sequences in task (ie", "Explore. This assumes use with VR300 for the AD Feedback experiment. 
data_folder: path", "reverse: whether the level estimations should be descending (default; ie band increases with", "relative, reverse) # plot raw data for the trial index given if plot:", "bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP, fs, order=2) filtered_data = downsample.downsample( bandpass_filtered_data, factor=downsample_rate)", "data x, y, num_seq, _ = trial_reshaper( trigger_targetness, trigger_timing, filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE,", "' \\ 'nature; ie PSD increases with attention. ' \\ 'Use this flag", "import read_data_csv from bcipy.signal.process.filter import bandpass, notch, downsample from bcipy.helpers.task import trial_reshaper from", "# parse and define the command line arguments. args = parser.parse_args() data_folder =", "= args.channel plot = args.plot relative_calculation = args.relative reverse = args.feedback_desc export_to_csv =", "\\n' f'Trigger Path: [{trigger_path}] \\n' f'Raw Data Path: [{raw_data_path}] \\n') # process and", "num_seq, _ = trial_reshaper( trigger_targetness, trigger_timing, filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp),", "'Average', 'Standard Deviation', 'Range [min max]', f'Quantiles {QUANTILES}']) # write PSD data for", "type=lambda x: (str(x).lower() == 'true'), help='Whether or not to plot raw data and", "window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative) return average def filter_data(raw_data, fs, downsample_rate, notch_filter_freqency): \"\"\"Filter Data.", "PSD') parser.add_argument('-relative', '--relative', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not to", "type=str, help='Path to BciPy data directory of interest.') # parse and define the", "or ascending step: how many stimuli between each trial [TODO: this could be", "plot=False, relative=relative)) # calculate the fields of interest for export for 
name, band", "explore! psd = psd_explore( data_folder, channel_index, plot=plot, relative=relative_calculation, reverse=reverse, export_to_csv=export_to_csv) print( 'RESULTS:\\n' f'{psd}')", "of interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda x: (str(x).lower() == 'true'), help='By default, PSD", "ie PSD increases with attention. ' \\ 'Use this flag to reverse that", "arguments parser = argparse.ArgumentParser(description='Explore PSD.') parser.add_argument('-channel', '--channel', default=6, type=int, help='channel Index to compute", "with warnings.catch_warnings(): warnings.simplefilter('ignore') # explore! psd = psd_explore( data_folder, channel_index, plot=plot, relative=relative_calculation, reverse=reverse,", "filter_data(raw_data, fs, downsample_rate, notch_filter_freqency): \"\"\"Filter Data. Using the same procedure as AD supplement,", "('theta', [4, 7.99]) THETA_SUB_1 = ('theta_sub_1', [3.00, 5.00]) DELTA = ('delta', [1, 3.99])", "[1, 3.99]) DELTA_SUB_1 = ('delta_sub_1', [3.20, 4.00]) # append desired psd defined above", "& sampling rate \"\"\" notch_filterted_data = notch.notch_filter( raw_data, fs, notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter(", "write PSD data for name, _ in PSD_TO_DETERMINE: writer.writerow( [name, exports[name]['average'], exports[name]['stdev'], exports[name]['range'],", "for the AD Feedback experiment. data_folder: path to a BciPy data folder with", "f'Using Device: {type_amp} - {fs} samples/sec \\n') # filter the data filtered_data, sampling_rate_post_filter", "for some reason... 
supply the path in the console if not data_folder: data_folder", "[3.00, 5.00]) DELTA = ('delta', [1, 3.99]) DELTA_SUB_1 = ('delta_sub_1', [3.20, 4.00]) #", "numpy as np import matplotlib.pyplot as plt import seaborn as sns import warnings", "should be descending (default; ie band increases with attention) or ascending export_to_csv: whether", "2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE = 2 NOTCH_FREQ = 60 FILTER_HP = 2", "downsample from bcipy.helpers.task import trial_reshaper from bcipy.helpers.load import load_experimental_data from bcipy.helpers.triggers import trigger_decoder", "plot=True, relative=False, reverse=False, export_to_csv=False): \"\"\"PSD Explore. This assumes use with VR300 for the", "be descending (default; ie band increases with attention) or ascending step: how many", "data & sampling rate \"\"\" notch_filterted_data = notch.notch_filter( raw_data, fs, notch_filter_freqency) bandpass_filtered_data =", "as AD supplement, filter and downsample the data for futher processing. Return: Filtered", "{} exports[name]['data'] = [] def psd_explore( data_folder, channel_index, plot=True, relative=False, reverse=False, export_to_csv=False): \"\"\"PSD", "parser.parse_args() data_folder = args.path # Note: this doesn't work on Mac for some", "\\ 'Use this flag to reverse that direction. 
' \\ 'Used to calculate", "for every sequence (called frame here) for _ in range(frames): process_data = data[channel_index][index]", "('delta_sub_1', [3.20, 4.00]) # append desired psd defined above to the list to", "csv import numpy as np import matplotlib.pyplot as plt import seaborn as sns", "Calculate PSD for every sequence (called frame here) for _ in range(frames): process_data", "PSD_TO_DETERMINE: exports[name] = {} exports[name]['data'] = [] def psd_explore( data_folder, channel_index, plot=True, relative=False,", "ignore some pandas warnings, run the psd explore function and print results with", "csv.writer( feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write headers writer.writerow( ['', 'Average', 'Standard Deviation',", "= f'{data_folder}/{RAW_DATA_FN}' # print helpful information to console print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH} \\n'", "data average for plotting purposes only average = np.mean(np.array(tmp), axis=0) if plot: power_spectral_density(", "or not to export relative PSD output reverse: whether the level estimations should", "Device: {type_amp} - {fs} samples/sec \\n') # filter the data filtered_data, sampling_rate_post_filter =", "experiment. 
data_folder: path to a BciPy data folder with raw data and triggers", "\\n') # process and get the data from csv raw_data, _, channels, type_amp,", "from bcipy.helpers.triggers import trigger_decoder from bcipy.helpers.acquisition import ( analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import", "# append desired psd defined above to the list to use PSD_TO_DETERMINE =", "name, band in PSD_TO_DETERMINE: exports[name] = {} exports[name]['data'] = [] def psd_explore( data_folder,", "with attention) or ascending export_to_csv: whether or not to write output to csv", "the data for exploration relative: whether this is a relative or absolute calculation", "increases with attention) or ascending step: how many stimuli between each trial [TODO:", "flag to reverse that direction. ' \\ 'Used to calculate appropriate cutoffs for", "number of sequences in task (ie 50, 100) channel_index: channel we're interested in", "FILTER_HP, FILTER_LP, fs, order=2) filtered_data = downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter = fs /", "_, channels, type_amp, fs = read_data_csv(raw_data_path) # print helpful information to console print(", "x: (str(x).lower() == 'true'), help='Whether or not to use relative band calculation for", "console if not data_folder: data_folder = load_experimental_data() channel_index = args.channel plot = args.plot", "explore function and print results with warnings.catch_warnings(): warnings.simplefilter('ignore') # explore! psd = psd_explore(", "trial_length, sampling_rate, plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports. 
Loops through segmented data", "args.export # ignore some pandas warnings, run the psd explore function and print", "to console print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH} \\n' f'Downsample rate: {DOWNSAMPLE_RATE} \\n' f'Notch Frequency:", "Path: [{raw_data_path}] \\n') # process and get the data from csv raw_data, _,", "supplement, filter and downsample the data for futher processing. Return: Filtered data &", "arguments. args = parser.parse_args() data_folder = args.path # Note: this doesn't work on", "Data Plot') sns.set(font_scale=1.2) sns.despine() plt.show() if export_to_csv: export_data_to_csv(exports) return exports def create_sequence_exports( data,", "* we want the PSD from the first stimuli in trial to the", "= args.plot relative_calculation = args.relative reverse = args.feedback_desc export_to_csv = args.export # ignore", "rate: {DOWNSAMPLE_RATE} \\n' f'Notch Frequency: {NOTCH_FREQ} \\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path:", "relative or absolute calculation of PSD reverse: whether the level estimations should be", "bcipy.helpers.load import read_data_csv from bcipy.signal.process.filter import bandpass, notch, downsample from bcipy.helpers.task import trial_reshaper", "trial_length=TRIAL_LENGTH) data = create_sequence_exports( x, num_seq * 10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot, relative,", "# calculate a raw data average for plotting purposes only average = np.mean(np.array(tmp),", "ascending export_to_csv: whether or not to write output to csv returns: average, standard", "plt import seaborn as sns import warnings from bcipy.helpers.load import read_data_csv from bcipy.signal.process.filter", "60 FILTER_HP = 2 FILTER_LP = 40 # Quantile Exports QUANTILES = [15,", "VR300 for the AD Feedback experiment. 
data_folder: path to a BciPy data folder", "same procedure as AD supplement, filter and downsample the data for futher processing.", "{NOTCH_FREQ} \\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path: [{trigger_path}] \\n' f'Raw Data Path:", "each trial [TODO: this could be taken from parameters from the session] *", "results with warnings.catch_warnings(): warnings.simplefilter('ignore') # explore! psd = psd_explore( data_folder, channel_index, plot=plot, relative=relative_calculation,", "data_folder: data_folder = load_experimental_data() channel_index = args.channel plot = args.plot relative_calculation = args.relative", "sampling_rate: data sampling rate of EEG plot: whether or not to plot the", "feedback_file: writer = csv.writer( feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write headers writer.writerow( ['',", "data directory of interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda x: (str(x).lower() == 'true'), help='By", "reshape the data x, y, num_seq, _ = trial_reshaper( trigger_targetness, trigger_timing, filtered_data, mode=MODE,", "'--relative', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not to use relative", "trigger_decoder( mode=MODE, trigger_path=trigger_path) # add a static offset of 100 ms [TODO load", "Constants # [TODO] We can load some of these from the session parameter", "the fields of interest for export for name, band in PSD_TO_DETERMINE: stats_data =", "triggers and get a channel map _, trigger_targetness, trigger_timing, offset = trigger_decoder( mode=MODE,", "and define the command line arguments. 
args = parser.parse_args() data_folder = args.path #", "power_spectral_density( average, [1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative) return average def filter_data(raw_data,", "reverse = args.feedback_desc export_to_csv = args.export # ignore some pandas warnings, run the", "export relative PSD output reverse: whether the level estimations should be descending (default;", "print results with warnings.catch_warnings(): warnings.simplefilter('ignore') # explore! psd = psd_explore( data_folder, channel_index, plot=plot,", "TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE = 2 NOTCH_FREQ = 60 FILTER_HP", "sampling rate of EEG plot: whether or not to plot the data for", "'true'), help='By default, PSD are assumed desceding in ' \\ 'nature; ie PSD", "return average def filter_data(raw_data, fs, downsample_rate, notch_filter_freqency): \"\"\"Filter Data. Using the same procedure", "if plot: power_spectral_density( average, [1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative) return average", "= 40 # Quantile Exports QUANTILES = [15, 30, 45, 70] # PSD", "frame here) for _ in range(frames): process_data = data[channel_index][index] tmp.append(process_data) index += step", "ALPHA_SUB_1, ALPHA_SUB_2, BETA, THETA, THETA_SUB_1, DELTA] # Initialize exports exports = {} for", "PSD are assumed desceding in ' \\ 'nature; ie PSD increases with attention.", "headers writer.writerow( ['', 'Average', 'Standard Deviation', 'Range [min max]', f'Quantiles {QUANTILES}']) # write", "attention) or ascending export_to_csv: whether or not to write output to csv returns:", "we're interested in extracting trial_length: length of reshaping sampling_rate: data sampling rate of", "from the session] * we want the PSD from the first stimuli in", "sns import warnings from bcipy.helpers.load import read_data_csv from bcipy.signal.process.filter 
import bandpass, notch, downsample", "] if reverse: QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES) del exports[name]['data'] # calculate a", "notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP, fs, order=2) filtered_data = downsample.downsample( bandpass_filtered_data,", "process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative)) # calculate the fields of interest", "on Mac for some reason... supply the path in the console if not", "def psd_explore( data_folder, channel_index, plot=True, relative=False, reverse=False, export_to_csv=False): \"\"\"PSD Explore. This assumes use", "THETA_SUB_1, DELTA] # Initialize exports exports = {} for name, band in PSD_TO_DETERMINE:", "40 # Quantile Exports QUANTILES = [15, 30, 45, 70] # PSD Parameters", "for feedback levels ') parser.add_argument('-export', '--export', required=False, default=False, type=str, help='Path to BciPy data", "warnings.simplefilter('ignore') # explore! psd = psd_explore( data_folder, channel_index, plot=plot, relative=relative_calculation, reverse=reverse, export_to_csv=export_to_csv) print(", "# filter the data filtered_data, sampling_rate_post_filter = filter_data( raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ) #", "= args.export # ignore some pandas warnings, run the psd explore function and", "the same procedure as AD supplement, filter and downsample the data for futher", "raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ) # decode triggers and get a channel map _,", "= ('alpha', [8, 11.99]) ALPHA_SUB_1 = ('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2 = ('alpha_sub_2', [11.5,", "the PSD sequence data. 
data: reshaped trial data ['first', 'second'] num_trials: total number", "for name, band in PSD_TO_DETERMINE: stats_data = np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data, axis=0) exports[name]['stdev']", "axis=0), np.max(stats_data, axis=0) ] if reverse: QUANTILES.reverse() exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES) del exports[name]['data']", "only average = np.mean(np.array(tmp), axis=0) if plot: power_spectral_density( average, [1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH,", "factor=downsample_rate) sampling_rate_post_filter = fs / downsample_rate return filtered_data, sampling_rate_post_filter def export_data_to_csv(exports): with open(CSV_EXPORT_NAME,", "(str(x).lower() == 'true'), help='Whether or not to use relative band calculation for PSD')", "np.std(stats_data, axis=0) exports[name]['range'] = [ np.min(stats_data, axis=0), np.max(stats_data, axis=0) ] if reverse: QUANTILES.reverse()", "matplotlib.pyplot as plt import seaborn as sns import warnings from bcipy.helpers.load import read_data_csv", "= 2 NOTCH_FREQ = 60 FILTER_HP = 2 FILTER_LP = 40 # Quantile", "= args.relative reverse = args.feedback_desc export_to_csv = args.export # ignore some pandas warnings,", "exports[name]['range'], exports[name]['quantiles']] ) if __name__ == '__main__': import argparse # Define necessary command", "the session parameter files MODE = 'calibration' TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN = 'raw_data.csv'", "process_data = data[channel_index][index] tmp.append(process_data) index += step for name, band in PSD_TO_DETERMINE: exports[name]['data'].append(", "# PSD Parameters \"\"\"Define bands here and add to PSD_TO_DETERMINE list.\"\"\" ALPHA =", "= ('alpha_sub_2', [11.5, 12.5]) BETA = ('beta', [12, 25]) THETA = ('theta', [4,", "range(frames): process_data = data[channel_index][index] tmp.append(process_data) index += step for name, band in PSD_TO_DETERMINE:", "and downsample the data for futher 
processing. Return: Filtered data & sampling rate", "offset = trigger_decoder( mode=MODE, trigger_path=trigger_path) # add a static offset of 100 ms", "bcipy.signal.process.decomposition.psd import ( power_spectral_density, PSD_TYPE) # BciPy Constants # [TODO] We can load", "x, y, num_seq, _ = trial_reshaper( trigger_targetness, trigger_timing, filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset,", "BciPy data directory of interest.') parser.add_argument('-feedback_desc', '--feedback_desc', default=False, type=lambda x: (str(x).lower() == 'true'),", "[7.00, 9.00]) ALPHA_SUB_2 = ('alpha_sub_2', [11.5, 12.5]) BETA = ('beta', [12, 25]) THETA", "bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP, fs, order=2) filtered_data = downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter =", "data and psd spectrum relative: whether or not to export relative PSD output", "= downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter = fs / downsample_rate return filtered_data, sampling_rate_post_filter def", "f'Notch Frequency: {NOTCH_FREQ} \\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path: [{trigger_path}] \\n' f'Raw", "ALPHA_SUB_2 = ('alpha_sub_2', [11.5, 12.5]) BETA = ('beta', [12, 25]) THETA = ('theta',", "plt.ylabel('Voltage') plt.xlim([time.min(), time.max()]) plt.title('Raw Data Plot') sns.set(font_scale=1.2) sns.despine() plt.show() if export_to_csv: export_data_to_csv(exports) return", "these from the session parameter files MODE = 'calibration' TRIGGERS_FN = 'triggers.txt' RAW_DATA_FN", "filter_data( raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ) # decode triggers and get a channel map", "[TODO load from parameters] offset = offset + .1 # reshape the data", "bcipy.helpers.acquisition import ( analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import ( power_spectral_density, PSD_TYPE) # BciPy", 
"analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import ( power_spectral_density, PSD_TYPE) # BciPy Constants # [TODO]", "2 NOTCH_FREQ = 60 FILTER_HP = 2 FILTER_LP = 40 # Quantile Exports", "some of these from the session parameter files MODE = 'calibration' TRIGGERS_FN =", "[] def psd_explore( data_folder, channel_index, plot=True, relative=False, reverse=False, export_to_csv=False): \"\"\"PSD Explore. This assumes", "average for plotting purposes only average = np.mean(np.array(tmp), axis=0) if plot: power_spectral_density( average,", "raw data average for plotting purposes only average = np.mean(np.array(tmp), axis=0) if plot:", "the command line arguments. args = parser.parse_args() data_folder = args.path # Note: this", "= create_sequence_exports( x, num_seq * 10, channel_index, TRIAL_LENGTH, sampling_rate_post_filter, plot, relative, reverse) #", "trigger_path=trigger_path) # add a static offset of 100 ms [TODO load from parameters]", "assumed desceding in ' \\ 'nature; ie PSD increases with attention. ' \\", "bandpass, notch, downsample from bcipy.helpers.task import trial_reshaper from bcipy.helpers.load import load_experimental_data from bcipy.helpers.triggers", "type_amp, fs = read_data_csv(raw_data_path) # print helpful information to console print( 'DEVICE INFO:'", "relative=False, reverse=False, export_to_csv=False): \"\"\"PSD Explore. This assumes use with VR300 for the AD", "PSD Parameters \"\"\"Define bands here and add to PSD_TO_DETERMINE list.\"\"\" ALPHA = ('alpha',", "' \\ 'Use this flag to reverse that direction. 
' \\ 'Used to", "[3.20, 4.00]) # append desired psd defined above to the list to use", "for the trial index given if plot: time = np.arange( data.size) / sampling_rate_post_filter", "channel_index: channel we're interested in extracting trial_length: length of reshaping sampling_rate: data sampling", "notch, downsample from bcipy.helpers.task import trial_reshaper from bcipy.helpers.load import load_experimental_data from bcipy.helpers.triggers import", "= ('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2 = ('alpha_sub_2', [11.5, 12.5]) BETA = ('beta', [12,", "We can load some of these from the session parameter files MODE =", "Mac for some reason... supply the path in the console if not data_folder:", "AD Feedback experiment. data_folder: path to a BciPy data folder with raw data", "# ignore some pandas warnings, run the psd explore function and print results", "the data x, y, num_seq, _ = trial_reshaper( trigger_targetness, trigger_timing, filtered_data, mode=MODE, fs=fs,", "in extracting trial_length: length of reshaping sampling_rate: data sampling rate of EEG plot:", "exports[name]['data'] # calculate a raw data average for plotting purposes only average =", "to a BciPy data folder with raw data and triggers channel_index: channel to", "= read_data_csv(raw_data_path) # print helpful information to console print( 'DEVICE INFO:' f'\\nChannels loaded:", "print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH} \\n' f'Downsample rate: {DOWNSAMPLE_RATE} \\n' f'Notch Frequency: {NOTCH_FREQ} \\n'", "plt.title('Raw Data Plot') sns.set(font_scale=1.2) sns.despine() plt.show() if export_to_csv: export_data_to_csv(exports) return exports def create_sequence_exports(", "folder with raw data and triggers channel_index: channel to use for PSD calculation", "exports[name]['quantiles'] = np.percentile(stats_data, QUANTILES) del exports[name]['data'] # calculate a raw data average for", "work on Mac for some reason... 
supply the path in the console if", "absolute calculation of PSD reverse: whether the level estimations should be descending (default;", "Note: this doesn't work on Mac for some reason... supply the path in", "calculation for PSD') parser.add_argument('-path', '--path', default=False, type=str, help='Path to BciPy data directory of", "BciPy data directory of interest.') # parse and define the command line arguments.", "PSD_TYPE) # BciPy Constants # [TODO] We can load some of these from", "length of reshaping sampling_rate: data sampling rate of EEG plot: whether or not", "relative: whether this is a relative or absolute calculation of PSD reverse: whether", "print helpful information to console print('CONFIGURATION:\\n' f'Trial length: {TRIAL_LENGTH} \\n' f'Downsample rate: {DOWNSAMPLE_RATE}", "default=6, type=int, help='channel Index to compute PSD') parser.add_argument('-plot', '--plot', default=False, type=lambda x: (str(x).lower()", "not to plot the filtered data and psd spectrum relative: whether or not", "the data from csv raw_data, _, channels, type_amp, fs = read_data_csv(raw_data_path) # print", "np import matplotlib.pyplot as plt import seaborn as sns import warnings from bcipy.helpers.load", "length: {TRIAL_LENGTH} \\n' f'Downsample rate: {DOWNSAMPLE_RATE} \\n' f'Notch Frequency: {NOTCH_FREQ} \\n' f'Bandpass Range:", "to compute PSD') parser.add_argument('-plot', '--plot', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or", "exports = {} for name, band in PSD_TO_DETERMINE: exports[name] = {} exports[name]['data'] =", "stimuli in trial to the trial_length \"\"\" index = 0 frames = int(num_trials", "ALPHA_SUB_1 = ('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2 = ('alpha_sub_2', [11.5, 12.5]) BETA = ('beta',", "time.max()]) plt.title('Raw Data Plot') sns.set(font_scale=1.2) sns.despine() plt.show() if export_to_csv: export_data_to_csv(exports) return exports def", "data for name, _ in PSD_TO_DETERMINE: writer.writerow( [name, 
exports[name]['average'], exports[name]['stdev'], exports[name]['range'], exports[name]['quantiles']] )", "standard deviation \"\"\" # construct the relevant data paths trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path", "70] # PSD Parameters \"\"\"Define bands here and add to PSD_TO_DETERMINE list.\"\"\" ALPHA", "plot, relative, reverse) # plot raw data for the trial index given if", "the PSD from the first stimuli in trial to the trial_length \"\"\" index", "order=2) filtered_data = downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter = fs / downsample_rate return filtered_data,", "') parser.add_argument('-export', '--export', required=False, default=False, type=str, help='Path to BciPy data directory of interest.')", "this is a relative or absolute calculation of PSD reverse: whether the level", "or not to use relative band calculation for PSD') parser.add_argument('-path', '--path', default=False, type=str,", "trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}' # print helpful information to console print('CONFIGURATION:\\n'", "as feedback_file: writer = csv.writer( feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write headers writer.writerow(", "we want the PSD from the first stimuli in trial to the trial_length", "rate \"\"\" notch_filterted_data = notch.notch_filter( raw_data, fs, notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP,", "25]) THETA = ('theta', [4, 7.99]) THETA_SUB_1 = ('theta_sub_1', [3.00, 5.00]) DELTA =", "estimations should be descending (default; ie band increases with attention) or ascending export_to_csv:", "and get the data from csv raw_data, _, channels, type_amp, fs = read_data_csv(raw_data_path)", "= np.arange( data.size) / sampling_rate_post_filter fig, ax = plt.subplots(1, 1, figsize=(12, 4)) plt.plot(time,", "default=False, type=str, help='Path to 
BciPy data directory of interest.') # parse and define", "compute PSD') parser.add_argument('-plot', '--plot', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not", "task (ie 50, 100) channel_index: channel we're interested in extracting trial_length: length of", "(str(x).lower() == 'true'), help='By default, PSD are assumed desceding in ' \\ 'nature;", "appropriate cutoffs for feedback levels ') parser.add_argument('-export', '--export', required=False, default=False, type=str, help='Path to", "get the data from csv raw_data, _, channels, type_amp, fs = read_data_csv(raw_data_path) #", "[1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=plot, relative=relative) return average def filter_data(raw_data, fs, downsample_rate,", "channel map _, trigger_targetness, trigger_timing, offset = trigger_decoder( mode=MODE, trigger_path=trigger_path) # add a", "sequence data. data: reshaped trial data ['first', 'second'] num_trials: total number of sequences", "= ('delta_sub_1', [3.20, 4.00]) # append desired psd defined above to the list", "export_to_csv: whether or not to write output to csv returns: average, standard deviation", "BETA, THETA, THETA_SUB_1, DELTA] # Initialize exports exports = {} for name, band", "whether or not to export relative PSD output reverse: whether the level estimations", "help='Whether or not to use relative band calculation for PSD') parser.add_argument('-path', '--path', default=False,", "_ = trial_reshaper( trigger_targetness, trigger_timing, filtered_data, mode=MODE, fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH)", "psd spectrum relative: whether or not to export relative PSD output reverse: whether", "attention) or ascending step: how many stimuli between each trial [TODO: this could", "calculate a raw data average for plotting purposes only average = np.mean(np.array(tmp), axis=0)", "the console if not 
data_folder: data_folder = load_experimental_data() channel_index = args.channel plot =", "args.plot relative_calculation = args.relative reverse = args.feedback_desc export_to_csv = args.export # ignore some", "fs=fs, k=DOWNSAMPLE_RATE, offset=offset, channel_map=analysis_channels(channels, type_amp), trial_length=TRIAL_LENGTH) data = create_sequence_exports( x, num_seq * 10,", "import trial_reshaper from bcipy.helpers.load import load_experimental_data from bcipy.helpers.triggers import trigger_decoder from bcipy.helpers.acquisition import", "# Quantile Exports QUANTILES = [15, 30, 45, 70] # PSD Parameters \"\"\"Define", "Data Path: [{raw_data_path}] \\n') # process and get the data from csv raw_data,", "relevant data paths trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}' # print helpful information", "power_spectral_density( process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative)) # calculate the fields of", "name, band in PSD_TO_DETERMINE: stats_data = np.array(exports[name]['data']) exports[name]['average'] = np.mean(stats_data, axis=0) exports[name]['stdev'] =", "sampling rate \"\"\" notch_filterted_data = notch.notch_filter( raw_data, fs, notch_filter_freqency) bandpass_filtered_data = bandpass.butter_bandpass_filter( notch_filterted_data,", "PSD_TO_DETERMINE list.\"\"\" ALPHA = ('alpha', [8, 11.99]) ALPHA_SUB_1 = ('alpha_sub_1', [7.00, 9.00]) ALPHA_SUB_2", "defined above to the list to use PSD_TO_DETERMINE = [ALPHA, ALPHA_SUB_1, ALPHA_SUB_2, BETA,", "static offset of 100 ms [TODO load from parameters] offset = offset +", "create_sequence_exports( data, num_trials, channel_index, trial_length, sampling_rate, plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports.", "# construct the relevant data paths trigger_path = f'{data_folder}/{TRIGGERS_FN}' raw_data_path = f'{data_folder}/{RAW_DATA_FN}' #", 
"Frequency: {NOTCH_FREQ} \\n' f'Bandpass Range: [{FILTER_HP}-{FILTER_LP}] \\n' f'Trigger Path: [{trigger_path}] \\n' f'Raw Data", ") if __name__ == '__main__': import argparse # Define necessary command line arguments", "import bandpass, notch, downsample from bcipy.helpers.task import trial_reshaper from bcipy.helpers.load import load_experimental_data from", "decode triggers and get a channel map _, trigger_targetness, trigger_timing, offset = trigger_decoder(", "every sequence (called frame here) for _ in range(frames): process_data = data[channel_index][index] tmp.append(process_data)", "# decode triggers and get a channel map _, trigger_targetness, trigger_timing, offset =", "processing. Return: Filtered data & sampling rate \"\"\" notch_filterted_data = notch.notch_filter( raw_data, fs,", "= np.percentile(stats_data, QUANTILES) del exports[name]['data'] # calculate a raw data average for plotting", "== 'true'), help='Whether or not to use relative band calculation for PSD') parser.add_argument('-path',", "ie band increases with attention) or ascending export_to_csv: whether or not to write", "print helpful information to console print( 'DEVICE INFO:' f'\\nChannels loaded: {channels}. \\n' f'Using", "return exports def create_sequence_exports( data, num_trials, channel_index, trial_length, sampling_rate, plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE):", "= bandpass.butter_bandpass_filter( notch_filterted_data, FILTER_HP, FILTER_LP, fs, order=2) filtered_data = downsample.downsample( bandpass_filtered_data, factor=downsample_rate) sampling_rate_post_filter", "parser.add_argument('-plot', '--plot', default=False, type=lambda x: (str(x).lower() == 'true'), help='Whether or not to plot", "help='By default, PSD are assumed desceding in ' \\ 'nature; ie PSD increases", "the path in the console if not data_folder: data_folder = load_experimental_data() channel_index =", "AD supplement, filter and downsample the data for futher processing. 
Return: Filtered data", "# Note: this doesn't work on Mac for some reason... supply the path", "to export relative PSD output reverse: whether the level estimations should be descending", "= 2 FILTER_LP = 40 # Quantile Exports QUANTILES = [15, 30, 45,", "feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write headers writer.writerow( ['', 'Average', 'Standard Deviation', 'Range", "channel: {channels[channel_index]} \\n' f'Using Device: {type_amp} - {fs} samples/sec \\n') # filter the", "int(num_trials / step) tmp = [] # Calculate PSD for every sequence (called", "to the trial_length \"\"\" index = 0 frames = int(num_trials / step) tmp", "from bcipy.helpers.acquisition import ( analysis_channels, analysis_channel_names_by_pos) from bcipy.signal.process.decomposition.psd import ( power_spectral_density, PSD_TYPE) #", "= fs / downsample_rate return filtered_data, sampling_rate_post_filter def export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w') as", "'Range [min max]', f'Quantiles {QUANTILES}']) # write PSD data for name, _ in", "can load some of these from the session parameter files MODE = 'calibration'", "to plot raw data and PSD') parser.add_argument('-relative', '--relative', default=False, type=lambda x: (str(x).lower() ==", "relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports. Loops through segmented data and calculates the", "sampling_rate_post_filter = fs / downsample_rate return filtered_data, sampling_rate_post_filter def export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w')", "data, num_trials, channel_index, trial_length, sampling_rate, plot, relative, reverse, step=NUMBER_OF_STIMULI_PER_SEQUENCE): \"\"\"Create Sequence exports. Loops", "CSV_EXPORT_NAME = 'feedback_exports.csv' # Parameters TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE =", "assumes use with VR300 for the AD Feedback experiment. 
data_folder: path to a", "= ('delta', [1, 3.99]) DELTA_SUB_1 = ('delta_sub_1', [3.20, 4.00]) # append desired psd", "in PSD_TO_DETERMINE: exports[name]['data'].append( power_spectral_density( process_data, band, sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH, plot=False, relative=relative)) # calculate", "from parameters from the session] * we want the PSD from the first", "raw data for the trial index given if plot: time = np.arange( data.size)", "del exports[name]['data'] # calculate a raw data average for plotting purposes only average", "sampling_rate_post_filter def export_data_to_csv(exports): with open(CSV_EXPORT_NAME, 'w') as feedback_file: writer = csv.writer( feedback_file, delimiter=',',", "'Used to calculate appropriate cutoffs for feedback levels ') parser.add_argument('-export', '--export', required=False, default=False,", "= 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE = 2 NOTCH_FREQ = 60 FILTER_HP =", "open(CSV_EXPORT_NAME, 'w') as feedback_file: writer = csv.writer( feedback_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) # write", "plot = args.plot relative_calculation = args.relative reverse = args.feedback_desc export_to_csv = args.export #", "import csv import numpy as np import matplotlib.pyplot as plt import seaborn as", "# Parameters TRIAL_LENGTH = 2.5 NUMBER_OF_STIMULI_PER_SEQUENCE = 10 DOWNSAMPLE_RATE = 2 NOTCH_FREQ =", "<filename>python/experiments/feedback/bcipy_psd_explore.py import csv import numpy as np import matplotlib.pyplot as plt import seaborn", "average = np.mean(np.array(tmp), axis=0) if plot: power_spectral_density( average, [1, 2], sampling_rate=sampling_rate, window_length=TRIAL_LENGTH, method=PSD_TYPE.WELCH,", "' \\ 'Used to calculate appropriate cutoffs for feedback levels ') parser.add_argument('-export', '--export',", "= parser.parse_args() data_folder = args.path # Note: this doesn't work on Mac for", "THETA, THETA_SUB_1, DELTA] # Initialize exports exports = 
{} for name, band in", "= [] def psd_explore( data_folder, channel_index, plot=True, relative=False, reverse=False, export_to_csv=False): \"\"\"PSD Explore. This", "with raw data and triggers channel_index: channel to use for PSD calculation plot:", "map _, trigger_targetness, trigger_timing, offset = trigger_decoder( mode=MODE, trigger_path=trigger_path) # add a static", "30, 45, 70] # PSD Parameters \"\"\"Define bands here and add to PSD_TO_DETERMINE", "EEG plot: whether or not to plot the data for exploration relative: whether", "= filter_data( raw_data, fs, DOWNSAMPLE_RATE, NOTCH_FREQ) # decode triggers and get a channel", "filter and downsample the data for futher processing. Return: Filtered data & sampling", "are assumed desceding in ' \\ 'nature; ie PSD increases with attention. '", "could be taken from parameters from the session] * we want the PSD", "here and add to PSD_TO_DETERMINE list.\"\"\" ALPHA = ('alpha', [8, 11.99]) ALPHA_SUB_1 =", "data and calculates the PSD sequence data. data: reshaped trial data ['first', 'second']", "data for the trial index given if plot: time = np.arange( data.size) /" ]
[ "\"\"\" @file @brief Shortcuts to *testing*. \"\"\" from .dummy_applications import dummy_application, dummy_application_image from", "Shortcuts to *testing*. \"\"\" from .dummy_applications import dummy_application, dummy_application_image from .dummy_applications import dummy_application_fct,", "import dummy_application, dummy_application_image from .dummy_applications import dummy_application_fct, dummy_application_neighbors, dummy_application_neighbors_image from .dummy_applications import dummy_application_auth,", "@file @brief Shortcuts to *testing*. \"\"\" from .dummy_applications import dummy_application, dummy_application_image from .dummy_applications", "from .dummy_applications import dummy_application, dummy_application_image from .dummy_applications import dummy_application_fct, dummy_application_neighbors, dummy_application_neighbors_image from .dummy_applications", "\"\"\" from .dummy_applications import dummy_application, dummy_application_image from .dummy_applications import dummy_application_fct, dummy_application_neighbors, dummy_application_neighbors_image from", "@brief Shortcuts to *testing*. \"\"\" from .dummy_applications import dummy_application, dummy_application_image from .dummy_applications import", "dummy_application, dummy_application_image from .dummy_applications import dummy_application_fct, dummy_application_neighbors, dummy_application_neighbors_image from .dummy_applications import dummy_application_auth, dummy_mlstorage", "*testing*. \"\"\" from .dummy_applications import dummy_application, dummy_application_image from .dummy_applications import dummy_application_fct, dummy_application_neighbors, dummy_application_neighbors_image", ".dummy_applications import dummy_application, dummy_application_image from .dummy_applications import dummy_application_fct, dummy_application_neighbors, dummy_application_neighbors_image from .dummy_applications import", "to *testing*. 
\"\"\" from .dummy_applications import dummy_application, dummy_application_image from .dummy_applications import dummy_application_fct, dummy_application_neighbors," ]
[ "global index start_time = time.time() env.global_index.load_all() end_time = time.time() click.echo(style.success(f\" * updated all", "You can specify a single index by name, several indices, or leave the", "this as an index conflicts = env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There are ID conflicts", "style = env.config.styles click.echo() failed = False for index_name in names: if index_name", "if index_name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{index_name}' to archive!\"))", "can specify a single index by name, several indices, or leave the 'name'", "echo_line(style.fail(f\"There is no index named '{name}' to remove!\")) return # If we got", "folder is merged into the global\" \"directory as it is.\")) for id_, conflict", "click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User created index\")) env.global_index.index_directory[name] = {\"path\": env.cwd} save_global_index_data(env.global_index) else:", "mnotes.notes.index import NoteIndex from mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context", "argument blank in order to back up all of them at once. \"\"\"", "start = time.time() for name in names: echo_line() echo_line(click.style(\"Zipping index \", bold=True), style.visible(f\"'{name}'\",", "index folders. You can specify a single index by name, several indices, or", "valid if valid_chars_pattern.findall(name): echo_line(\"The name \", style.fail(f\"'{name}'\"), \" contains invalid characters for an", "= time.time() for name in names: echo_line() echo_line(click.style(\"Zipping index \", bold=True), style.visible(f\"'{name}'\", bold=True))", "in the current directory. 
This command can be run from anywhere on the", "file has changed since it was last read (this is similar to the", "end_time = time.time() click.echo(style.success(f\" * updated all indices, took {end_time - start_time:0.2f} seconds\"))", "can be run from anywhere on the machine, it does not need to", "However, it's up to the file system to report these values accurately, so", "click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User deleted index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index) else:", "is similar to the method which rsync uses) However, it's up to the", "'{name}' to remove!\")) return # If we got to this point we can", "shorter \" \"names are faster to type. Think of the index name as", "to this point we can create the index! click.echo() echo_line(style.warning(f\"You are about to", "time.time() click.echo(style.success(f\"Updated all indices with checksums, took {end_time - start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\",", "directory with the specified name. \"\"\" style = env.config.styles click.echo() # Check if", "of the index folders. 
You can specify a single index by name, several", "as an index conflicts = env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There are ID conflicts which", "'{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User deleted index\")) del env.global_index.index_directory[name]", "an index here because indexes cannot be contained by other \" \"indexes.\")) for", "leave the 'name' argument blank in order to back up all of them", "modified timestamp to guess at whether the file has changed since it was", "time.time() echo_line() echo_line(style.success(f\"Operation completed in {end - start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env def reload(env:", "cached data by looking at the file size and last modified timestamp to", "echo_line(style.success(f\"Operation completed in {end - start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env def reload(env: MnoteEnvironment): \"\"\"", "operations \"\"\" import os import re import sys import time from typing import", "to the global directory.\") return if name in env.global_index.indices: echo_line(\"The name \", style.fail(f\"'{name}'\"),", "name in env.global_index.indices: echo_line(\"The name \", style.fail(f\"'{name}'\"), \" is already used by another", "echo_line() echo_line(click.style(\"Zipping index \", bold=True), style.visible(f\"'{name}'\", bold=True)) index: NoteIndex = env.global_index.indices[name] now =", "other indexes\")) return # Check if this index would contain another index contained", "for name in names: echo_line() echo_line(click.style(\"Zipping index \", bold=True), style.visible(f\"'{name}'\", bold=True)) index: NoteIndex", "be named with the index name and the current date and time and", "directory. 
\"\"\" style = env.config.styles click.echo() if name not in env.global_index.indices: echo_line(style.fail(f\"There is", "its cached data by looking at the file size and last modified timestamp", "deleted index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str)", "current working directory is already part of an index named \" f\"'{env.index_of_cwd.name}'. Indexes", "be contained by other \" \"indexes.\")) for index in contained: echo_line(f\" * {index.name}:", "echo_line(\" * index mode\") if len(env.global_index.indices) == 0 and ctx.invoked_subcommand != \"create\": echo_line(\"", "with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1) @pass_env def zip_cmd(env: MnoteEnvironment, names: List[str]): \"\"\"", "create the index! click.echo() echo_line(style.warning(f\"You are about to remove the index named '{name}'\",", "os import re import sys import time from typing import List from zipfile", "or leave the 'name' argument blank in order to back up all of", "DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\") as zip_handle: with click.progressbar(index.notes.values()) as", "contained by other \" \"indexes.\")) for index in contained: echo_line(f\" * {index.name}: {index.path}\")", "import re import sys import time from typing import List from zipfile import", "None: # Update the global index start_time = time.time() env.global_index.load_all() end_time = time.time()", "or an alias for the folder you\" \"are adding to the global directory.\")", "if failed: return if not names: echo_line(style.visible(\"No index(s) specified, so zipping all of", "echo_line(style.fail(\"The following already-existing indices are subdirectories of the current working \" \"directory. 
You", "already part of another index if env.index_of_cwd is not None: echo_line(style.fail(f\"The current working", "index conflicts = env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There are ID conflicts which would be", "'{index_name}' to archive!\")) failed = True if failed: return if not names: echo_line(style.visible(\"No", "by compressing them into zip files. The files will be named with the", "to remove the index named '{name}'\", bold=True)) echo_line(style.warning(f\"which maps to the folder '{env.cwd}'\",", "indices in zip files Creates archives of the markdown notes (text files only,", "time.time() for name in names: echo_line() echo_line(click.style(\"Zipping index \", bold=True), style.visible(f\"'{name}'\", bold=True)) index:", "{end - start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env def reload(env: MnoteEnvironment): \"\"\" Rebuild all indices", "import NoteIndex from mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env", "index here because indexes cannot be contained by other \" \"indexes.\")) for index", "in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{index_name}' to archive!\")) failed = True", "{c.file_path}\")) return # If we got to this point we can create the", "want to add\") echo_line(\" -> then use the 'mnote index create <name>' command\")", "\"\"\" Archive an index or multiple/all indices in zip files Creates archives of", "used by another index.\") click.echo() echo_line(\"Index names may contain numbers, lowercase letters, and", "echo_line(\" -> then use the 'mnote index create <name>' command\") sys.exit() else: echo_line(\"", "\"\"\" Commands for index operations \"\"\" import os import re import sys import", "an index name\") click.echo() echo_line(\"Index names may contain numbers, lowercase letters, and dashes", "names may 
contain numbers, lowercase letters, and dashes only. Also consider that shorter", "at the file size and timestamps. \"\"\" style = env.config.styles start_time = time.time()", "for index in env.global_index.indices.values(): echo_line(\" * \", style.visible(index.name), f\" ({len(index.notes)} notes): {index.path}\") echo_line()", "\"\"\" import os import re import sys import time from typing import List", "if not names: echo_line(style.visible(\"No index(s) specified, so zipping all of them...\")) names =", "= re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env def main(env: MnoteEnvironment, ctx: click.core.Context): \"\"\" Manage", "named '{name}' to remove!\")) return # If we got to this point we", "if env.index_of_cwd is not None: echo_line(style.fail(f\"The current working directory is already part of", "named \" f\"'{env.index_of_cwd.name}'. Indexes cannot be contained by other indexes\")) return # Check", "looking at the file size and last modified timestamp to guess at whether", "by name, several indices, or leave the 'name' argument blank in order to", "is valid if valid_chars_pattern.findall(name): echo_line(\"The name \", style.fail(f\"'{name}'\"), \" contains invalid characters for", "invalid characters for an index name\") click.echo() echo_line(\"Index names may contain numbers, lowercase", "!= \"create\": echo_line(\" * there are \", style.warning(\"no indices\"), \" in the global", "checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1) @pass_env def zip_cmd(env: MnoteEnvironment, names: List[str]): \"\"\" Archive", "all of the files, but slower than simply looking at the file size", "folders. 
You can specify a single index by name, several indices, or leave", "to the file system to report these values accurately, so this option uses", "env.index_of_cwd is not None: echo_line(style.fail(f\"The current working directory is already part of an", "another index contained = env.indices_in_cwd if contained: echo_line(style.fail(\"The following already-existing indices are subdirectories", "Check if the name given is valid if valid_chars_pattern.findall(name): echo_line(\"The name \", style.fail(f\"'{name}'\"),", "is already part of another index if env.index_of_cwd is not None: echo_line(style.fail(f\"The current", "the file has changed since it was last read (this is similar to", "type=str, nargs=-1) @pass_env def zip_cmd(env: MnoteEnvironment, names: List[str]): \"\"\" Archive an index or", "index! click.echo() echo_line(style.warning(f\"You are about to create an index named '{name}'\", bold=True)) echo_line(style.warning(f\"which", "index reload' to rebuild with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1) @pass_env def zip_cmd(env:", "index \", bold=True), style.visible(f\"'{name}'\", bold=True)) index: NoteIndex = env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name", "name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{name}' to remove!\")) return", "the global\" \"directory as it is.\")) for id_, conflict in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict", "alias for the folder you\" \"are adding to the global directory.\") return if", "of the indices by compressing them into zip files. The files will be", "them at once. 
\"\"\" style = env.config.styles click.echo() failed = False for index_name", "\"\"\" style = env.config.styles start_time = time.time() env.global_index.load_all(True) end_time = time.time() click.echo(style.success(f\"Updated all", "index mode\") if len(env.global_index.indices) == 0 and ctx.invoked_subcommand != \"create\": echo_line(\" * there", "name: str): \"\"\" Delete an index from the global directory. \"\"\" style =", "to the folder containing notes you want to add\") echo_line(\" -> then use", "is no index named '{name}' to remove!\")) return # If we got to", "completed in {end - start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env def reload(env: MnoteEnvironment): \"\"\" Rebuild", "index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env", "are faster to type. Think of the index name as a nickname or", "click.echo() # Check if this folder is already part of another index if", "c in conflict.conflicting: echo_line(style.warning(f\" * In this directory: {c.file_path}\")) return # If we", "back up all of them at once. \"\"\" style = env.config.styles click.echo() failed", "env.config.styles click.echo() # Check if this folder is already part of another index", "* \", style.visible(index.name), f\" ({len(index.notes)} notes): {index.path}\") echo_line() echo_line(style.visible(\" (use 'mnote index reload'", "# If we got to this point we can create the index! 
click.echo()", "* there are \", style.visible(f\"{len(env.global_index.indices)}\"), \" indices in the global directory\") if ctx.invoked_subcommand", "re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env def main(env: MnoteEnvironment, ctx: click.core.Context): \"\"\" Manage M-Notes'", "index_name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{index_name}' to archive!\")) failed", "files will be named with the index name and the current date and", "all of them...\")) names = [i.name for i in env.global_index.indices.values()] start = time.time()", "values accurately, so this option uses the SHA1 checksum to rebuild the indicies.", "this point we can create the index! click.echo() echo_line(style.warning(f\"You are about to create", "on the machine, it does not need to be run from inside any", "echo_line(style.fail(\"There are ID conflicts which would be created if this folder is merged", "(this is similar to the method which rsync uses) However, it's up to", "size and last modified timestamp to guess at whether the file has changed", "of another index if env.index_of_cwd is not None: echo_line(style.fail(f\"The current working directory is", "in global: {e.file_path}\")) for c in conflict.conflicting: echo_line(style.warning(f\" * In this directory: {c.file_path}\"))", "of the markdown notes (text files only, no resources) of the indices by", "Archive an index or multiple/all indices in zip files Creates archives of the", "start=index.path), compress_type=ZIP_DEFLATED) end = time.time() echo_line() echo_line(style.success(f\"Operation completed in {end - start:0.1f} seconds\"))", "= env.config.styles env.global_index.load_all() echo_line(\" * index mode\") if len(env.global_index.indices) == 0 and ctx.invoked_subcommand", "and saved in the current directory. 
This command can be run from anywhere", "You can't create an index here because indexes cannot be contained by other", "pass_env, echo_line, save_global_index_data from mnotes.notes.index import NoteIndex from mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern =", "merged into the global\" \"directory as it is.\")) for id_, conflict in conflicts.items():", "all indices, took {end_time - start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current Indices in Global Directory:\",", "@click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env def main(env: MnoteEnvironment, ctx: click.core.Context): \"\"\" Manage M-Notes' global", "notes: for note in notes: note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end =", "datetime as DateTime import click from mnotes.environment import MnoteEnvironment, pass_env, echo_line, save_global_index_data from", "style.fail(f\"'{name}'\"), \" contains invalid characters for an index name\") click.echo() echo_line(\"Index names may", "directory\") if ctx.invoked_subcommand is None: # Update the global index start_time = time.time()", "time.time() env.global_index.load_all(True) end_time = time.time() click.echo(style.success(f\"Updated all indices with checksums, took {end_time -", "# Update the global index start_time = time.time() env.global_index.load_all() end_time = time.time() click.echo(style.success(f\"", "in contained: echo_line(f\" * {index.name}: {index.path}\") return # Check if the name given", "directory is already part of an index named \" f\"'{env.index_of_cwd.name}'. 
Indexes cannot be", "sys.exit() else: echo_line(\" * there are \", style.visible(f\"{len(env.global_index.indices)}\"), \" indices in the global", "since it was last read (this is similar to the method which rsync", "env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env def create(env:", "in notes: note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end = time.time() echo_line() echo_line(style.success(f\"Operation", "env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\") as zip_handle:", "ctx.invoked_subcommand != \"create\": echo_line(\" * there are \", style.warning(\"no indices\"), \" in the", "index named '{name}'\", bold=True)) echo_line(style.warning(f\"which will be located in the folder '{env.cwd}'\", bold=True))", "an index or multiple/all indices in zip files Creates archives of the markdown", "rebuild the indicies. It's faster than re-reading all of the files, but slower", "-> then use the 'mnote index create <name>' command\") sys.exit() else: echo_line(\" *", "to the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User deleted", "faster to type. 
Think of the index name as a nickname or an", "\"are adding to the global directory.\") return if name in env.global_index.indices: echo_line(\"The name", "guess at whether the file has changed since it was last read (this", "* In this directory: {c.file_path}\")) return # If we got to this point", "seconds\")) click.echo() echo_line(click.style(\"Current Indices in Global Directory:\", bold=True)) for index in env.global_index.indices.values(): echo_line(\"", "to guess at whether the file has changed since it was last read", "the global directory.\") return if name in env.global_index.indices: echo_line(\"The name \", style.fail(f\"'{name}'\"), \"", "\" is already used by another index.\") click.echo() echo_line(\"Index names may contain numbers,", "another index.\") click.echo() echo_line(\"Index names may contain numbers, lowercase letters, and dashes only.", "Indexes cannot be contained by other indexes\")) return # Check if this index", "if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User deleted index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User", "zip files Creates archives of the markdown notes (text files only, no resources)", "to back up all of them at once. \"\"\" style = env.config.styles click.echo()", "conflict.conflicting: echo_line(style.warning(f\" * In this directory: {c.file_path}\")) return # If we got to", "\"names are faster to type. Think of the index name as a nickname", "we can create the index! 
click.echo() echo_line(style.warning(f\"You are about to create an index", "for index operations \"\"\" import os import re import sys import time from", "env.config.styles click.echo() if name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{name}'", "mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env def main(env: MnoteEnvironment,", "can create the index! click.echo() echo_line(style.warning(f\"You are about to create an index named", "bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User created index\")) env.global_index.index_directory[name] = {\"path\":", "import ZipFile, ZIP_DEFLATED from datetime import datetime as DateTime import click from mnotes.environment", "part of an index named \" f\"'{env.index_of_cwd.name}'. Indexes cannot be contained by other", "a nickname or an alias for the folder you\" \"are adding to the", "Update the global index start_time = time.time() env.global_index.load_all() end_time = time.time() click.echo(style.success(f\" *", "indices with checksums, took {end_time - start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env def", "{index.name}: {index.path}\") return # Check if the name given is valid if valid_chars_pattern.findall(name):", "echo_line(style.warning(f\"You are about to remove the index named '{name}'\", bold=True)) echo_line(style.warning(f\"which maps to", "by default will verify the integrity of its cached data by looking at", "data by looking at the file size and last modified timestamp to guess", "the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User deleted index\"))", "echo_line(style.warning(f\"You are about to create an index named 
'{name}'\", bold=True)) echo_line(style.warning(f\"which will be", "# Check if this index would contain another index contained = env.indices_in_cwd if", "working directory is already part of an index named \" f\"'{env.index_of_cwd.name}'. Indexes cannot", "The files will be named with the index name and the current date", "command\") sys.exit() else: echo_line(\" * there are \", style.visible(f\"{len(env.global_index.indices)}\"), \" indices in the", "rejected index creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env def create(env: MnoteEnvironment, name: str): \"\"\"", "into zip files. The files will be named with the index name and", "the global directory with the specified name. \"\"\" style = env.config.styles click.echo() #", "to create an index navigate to the folder containing notes you want to", "i in env.global_index.indices.values()] start = time.time() for name in names: echo_line() echo_line(click.style(\"Zipping index", "has changed since it was last read (this is similar to the method", "may contain numbers, lowercase letters, and dashes only. Also consider that shorter \"", "\" \"names are faster to type. 
Think of the index name as a", "this folder is merged into the global\" \"directory as it is.\")) for id_,", "from anywhere on the machine, it does not need to be run from", "bold=True)) echo_line(style.warning(f\"which will be located in the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply", "for e in conflict.existing: echo_line(style.visible(f\" * Already in global: {e.file_path}\")) for c in", "import List from zipfile import ZipFile, ZIP_DEFLATED from datetime import datetime as DateTime", "Indices in Global Directory:\", bold=True)) for index in env.global_index.indices.values(): echo_line(\" * \", style.visible(index.name),", "@main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env def create(env: MnoteEnvironment, name: str): \"\"\" Create a new", "are \", style.visible(f\"{len(env.global_index.indices)}\"), \" indices in the global directory\") if ctx.invoked_subcommand is None:", "notes: note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end = time.time() echo_line() echo_line(style.success(f\"Operation completed", "echo_line(\"Index names may contain numbers, lowercase letters, and dashes only. 
Also consider that", "env.global_index.indices: echo_line(\"The name \", style.fail(f\"'{name}'\"), \" is already used by another index.\") click.echo()", "another index if env.index_of_cwd is not None: echo_line(style.fail(f\"The current working directory is already", "for the folder you\" \"are adding to the global directory.\") # Check for", "mnotes.environment import MnoteEnvironment, pass_env, echo_line, save_global_index_data from mnotes.notes.index import NoteIndex from mnotes.notes.markdown_notes import", "not None: echo_line(style.fail(f\"The current working directory is already part of an index named", "to report these values accurately, so this option uses the SHA1 checksum to", "start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current Indices in Global Directory:\", bold=True)) for index in env.global_index.indices.values():", "failed = False for index_name in names: if index_name not in env.global_index.indices: echo_line(style.fail(f\"There", "{index.path}\") echo_line() echo_line(style.visible(\" (use 'mnote index reload' to rebuild with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\",", "* there are \", style.warning(\"no indices\"), \" in the global directory\") echo_line(\" ->", "for i in env.global_index.indices.values()] start = time.time() for name in names: echo_line() echo_line(click.style(\"Zipping", "echo_line(style.warning(f\"which maps to the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)):", "at the file size and last modified timestamp to guess at whether the", "with click.progressbar(index.notes.values()) as notes: for note in notes: note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path),", "= time.time() click.echo(style.success(f\"Updated all indices with checksums, took {end_time - start_time:0.2f} seconds\")) @main.command(name=\"delete\")", 
"env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There are ID conflicts which would be created if this", "Creates archives of the markdown notes (text files only, no resources) of the", "MnoteEnvironment, name: str): \"\"\" Delete an index from the global directory. \"\"\" style", "echo_line(\" -> to create an index navigate to the folder containing notes you", "zip_cmd(env: MnoteEnvironment, names: List[str]): \"\"\" Archive an index or multiple/all indices in zip", "file size and timestamps. \"\"\" style = env.config.styles start_time = time.time() env.global_index.load_all(True) end_time", "echo_line(style.fail(f\"There is no index named '{index_name}' to archive!\")) failed = True if failed:", "specified name. \"\"\" style = env.config.styles click.echo() # Check if this folder is", "inside any of the index folders. You can specify a single index by", "def reload(env: MnoteEnvironment): \"\"\" Rebuild all indices using checksums. M-Notes by default will", "delete(env: MnoteEnvironment, name: str): \"\"\" Delete an index from the global directory. \"\"\"", "checksums. M-Notes by default will verify the integrity of its cached data by", "to the method which rsync uses) However, it's up to the file system", "timestamps. 
\"\"\" style = env.config.styles start_time = time.time() env.global_index.load_all(True) end_time = time.time() click.echo(style.success(f\"Updated", "name: str): \"\"\" Create a new index in the global directory with the", "you want to add\") echo_line(\" -> then use the 'mnote index create <name>'", "\" indices in the global directory\") if ctx.invoked_subcommand is None: # Update the", "echo_line(style.visible(f\" * Already in global: {e.file_path}\")) for c in conflict.conflicting: echo_line(style.warning(f\" * In", "DateTime import click from mnotes.environment import MnoteEnvironment, pass_env, echo_line, save_global_index_data from mnotes.notes.index import", "name as a nickname or an alias for the folder you\" \"are adding", "Check if this index would contain another index contained = env.indices_in_cwd if contained:", "global directory with the specified name. \"\"\" style = env.config.styles click.echo() # Check", "compressing them into zip files. The files will be named with the index", "({len(index.notes)} notes): {index.path}\") echo_line() echo_line(style.visible(\" (use 'mnote index reload' to rebuild with checksums)\"))", "this point we can create the index! 
click.echo() echo_line(style.warning(f\"You are about to remove", "an alias for the folder you\" \"are adding to the global directory.\") #", "List[str]): \"\"\" Archive an index or multiple/all indices in zip files Creates archives", "in env.global_index.indices.values(): echo_line(\" * \", style.visible(index.name), f\" ({len(index.notes)} notes): {index.path}\") echo_line() echo_line(style.visible(\" (use", "import sys import time from typing import List from zipfile import ZipFile, ZIP_DEFLATED", "= True if failed: return if not names: echo_line(style.visible(\"No index(s) specified, so zipping", "bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User deleted index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index)", "\"are adding to the global directory.\") # Check for conflicts before allowing M-Notes", "be contained by other indexes\")) return # Check if this index would contain", "took {end_time - start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env def delete(env: MnoteEnvironment, name:", "we can create the index! click.echo() echo_line(style.warning(f\"You are about to remove the index", "rebuild with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1) @pass_env def zip_cmd(env: MnoteEnvironment, names: List[str]):", "the file size and last modified timestamp to guess at whether the file", "indexes cannot be contained by other \" \"indexes.\")) for index in contained: echo_line(f\"", "M-Notes' global directory of indices. 
Indices represent folders containing indexed notes.\"\"\" style =", "index in contained: echo_line(f\" * {index.name}: {index.path}\") return # Check if the name", "the 'name' argument blank in order to back up all of them at", "be located in the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)):", "an alias for the folder you\" \"are adding to the global directory.\") return", "created if this folder is merged into the global\" \"directory as it is.\"))", "typing import List from zipfile import ZipFile, ZIP_DEFLATED from datetime import datetime as", "of indices. Indices represent folders containing indexed notes.\"\"\" style = env.config.styles env.global_index.load_all() echo_line(\"", "indices, took {end_time - start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current Indices in Global Directory:\", bold=True))", "uses the SHA1 checksum to rebuild the indicies. It's faster than re-reading all", "at once. \"\"\" style = env.config.styles click.echo() failed = False for index_name in", "M-Notes to add this as an index conflicts = env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There", "index name\") click.echo() echo_line(\"Index names may contain numbers, lowercase letters, and dashes only.", "would contain another index contained = env.indices_in_cwd if contained: echo_line(style.fail(\"The following already-existing indices", "MnoteEnvironment): \"\"\" Rebuild all indices using checksums. M-Notes by default will verify the", "global\" \"directory as it is.\")) for id_, conflict in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for", "for the folder you\" \"are adding to the global directory.\") return if name", "and timestamps. 
\"\"\" style = env.config.styles start_time = time.time() env.global_index.load_all(True) end_time = time.time()", "env.config.styles start_time = time.time() env.global_index.load_all(True) end_time = time.time() click.echo(style.success(f\"Updated all indices with checksums,", "the folder you\" \"are adding to the global directory.\") return if name in", "the indices by compressing them into zip files. The files will be named", "cannot be contained by other \" \"indexes.\")) for index in contained: echo_line(f\" *", "click.echo(style.success(\"User created index\")) env.global_index.index_directory[name] = {\"path\": env.cwd} save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index creation\"))", "folder you\" \"are adding to the global directory.\") return if name in env.global_index.indices:", "contains invalid characters for an index name\") click.echo() echo_line(\"Index names may contain numbers,", "contained: echo_line(style.fail(\"The following already-existing indices are subdirectories of the current working \" \"directory.", "env.config.styles env.global_index.load_all() echo_line(\" * index mode\") if len(env.global_index.indices) == 0 and ctx.invoked_subcommand !=", "global directory. \"\"\" style = env.config.styles click.echo() if name not in env.global_index.indices: echo_line(style.fail(f\"There", "all of them at once. \"\"\" style = env.config.styles click.echo() failed = False", "name, several indices, or leave the 'name' argument blank in order to back", "conflict.existing: echo_line(style.visible(f\" * Already in global: {e.file_path}\")) for c in conflict.conflicting: echo_line(style.warning(f\" *", "alias for the folder you\" \"are adding to the global directory.\") # Check", "click.echo() echo_line(style.warning(f\"You are about to create an index named '{name}'\", bold=True)) echo_line(style.warning(f\"which will", "the indicies. 
It's faster than re-reading all of the files, but slower than", "click.echo(style.success(\"User deleted index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index creation\")) @main.command(name=\"create\") @click.argument(\"name\",", "In this directory: {c.file_path}\")) return # If we got to this point we", "folder you\" \"are adding to the global directory.\") # Check for conflicts before", "ctx: click.core.Context): \"\"\" Manage M-Notes' global directory of indices. Indices represent folders containing", "failed = True if failed: return if not names: echo_line(style.visible(\"No index(s) specified, so", "here because indexes cannot be contained by other \" \"indexes.\")) for index in", "(use 'mnote index reload' to rebuild with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1) @pass_env", "from inside any of the index folders. You can specify a single index", "failed: return if not names: echo_line(style.visible(\"No index(s) specified, so zipping all of them...\"))", "end_time = time.time() click.echo(style.success(f\"Updated all indices with checksums, took {end_time - start_time:0.2f} seconds\"))", "this option uses the SHA1 checksum to rebuild the indicies. 
It's faster than", "NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end = time.time() echo_line() echo_line(style.success(f\"Operation completed in {end", "return # Check if this index would contain another index contained = env.indices_in_cwd", "faster than re-reading all of the files, but slower than simply looking at", "if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User created index\")) env.global_index.index_directory[name] = {\"path\": env.cwd} save_global_index_data(env.global_index)", "already used by another index.\") click.echo() echo_line(\"Index names may contain numbers, lowercase letters,", "len(env.global_index.indices) == 0 and ctx.invoked_subcommand != \"create\": echo_line(\" * there are \", style.warning(\"no", "@main.command(name=\"reload\") @pass_env def reload(env: MnoteEnvironment): \"\"\" Rebuild all indices using checksums. M-Notes by", "the file system to report these values accurately, so this option uses the", "index from the global directory. 
\"\"\" style = env.config.styles click.echo() if name not", "index in env.global_index.indices.values(): echo_line(\" * \", style.visible(index.name), f\" ({len(index.notes)} notes): {index.path}\") echo_line() echo_line(style.visible(\"", "= False for index_name in names: if index_name not in env.global_index.indices: echo_line(style.fail(f\"There is", "index named '{name}' to remove!\")) return # If we got to this point", "echo_line(style.visible(\"No index(s) specified, so zipping all of them...\")) names = [i.name for i", "from datetime import datetime as DateTime import click from mnotes.environment import MnoteEnvironment, pass_env,", "as zip_handle: with click.progressbar(index.notes.values()) as notes: for note in notes: note: NoteInfo zip_handle.write(note.file_path,", "{end_time - start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env def delete(env: MnoteEnvironment, name: str):", "them...\")) names = [i.name for i in env.global_index.indices.values()] start = time.time() for name", "Also consider that shorter \" \"names are faster to type. Think of the", "seconds\")) @main.command(name=\"reload\") @pass_env def reload(env: MnoteEnvironment): \"\"\" Rebuild all indices using checksums. M-Notes", "zipping all of them...\")) names = [i.name for i in env.global_index.indices.values()] start =", "style = env.config.styles env.global_index.load_all() echo_line(\" * index mode\") if len(env.global_index.indices) == 0 and", "no resources) of the indices by compressing them into zip files. 
The files", "cannot be contained by other indexes\")) return # Check if this index would", "by another index.\") click.echo() echo_line(\"Index names may contain numbers, lowercase letters, and dashes", "echo_line(style.visible(\" (use 'mnote index reload' to rebuild with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1)", "folder containing notes you want to add\") echo_line(\" -> then use the 'mnote", "this directory: {c.file_path}\")) return # If we got to this point we can", "click.echo(style.success(f\"Updated all indices with checksums, took {end_time - start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str)", "multiple/all indices in zip files Creates archives of the markdown notes (text files", "this index would contain another index contained = env.indices_in_cwd if contained: echo_line(style.fail(\"The following", "is already part of an index named \" f\"'{env.index_of_cwd.name}'. Indexes cannot be contained", "files. The files will be named with the index name and the current", "echo_line(\" * \", style.visible(index.name), f\" ({len(index.notes)} notes): {index.path}\") echo_line() echo_line(style.visible(\" (use 'mnote index", "this change?\", bold=True)): click.echo(style.success(\"User created index\")) env.global_index.index_directory[name] = {\"path\": env.cwd} save_global_index_data(env.global_index) else: click.echo(style.fail(\"User", "in names: if index_name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{index_name}'", "Commands for index operations \"\"\" import os import re import sys import time", "the folder containing notes you want to add\") echo_line(\" -> then use the", "be run from inside any of the index folders. 
You can specify a", "'{name}'\", bold=True)) echo_line(style.warning(f\"which will be located in the folder '{env.cwd}'\", bold=True)) click.echo() if", "if len(env.global_index.indices) == 0 and ctx.invoked_subcommand != \"create\": echo_line(\" * there are \",", "will be located in the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\",", "index name as a nickname or an alias for the folder you\" \"are", "at whether the file has changed since it was last read (this is", "indices\"), \" in the global directory\") echo_line(\" -> to create an index navigate", "file system to report these values accurately, so this option uses the SHA1", "there are \", style.visible(f\"{len(env.global_index.indices)}\"), \" indices in the global directory\") if ctx.invoked_subcommand is", "global directory\") if ctx.invoked_subcommand is None: # Update the global index start_time =", "ZipFile, ZIP_DEFLATED from datetime import datetime as DateTime import click from mnotes.environment import", "style.visible(index.name), f\" ({len(index.notes)} notes): {index.path}\") echo_line() echo_line(style.visible(\" (use 'mnote index reload' to rebuild", "for ID {id_}:\", bold=True)) for e in conflict.existing: echo_line(style.visible(f\" * Already in global:", "was last read (this is similar to the method which rsync uses) However,", "is no index named '{index_name}' to archive!\")) failed = True if failed: return", "\", bold=True), style.visible(f\"'{name}'\", bold=True)) index: NoteIndex = env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name =", "as a nickname or an alias for the folder you\" \"are adding to", "the method which rsync uses) However, it's up to the file system to", "the name given is valid if valid_chars_pattern.findall(name): echo_line(\"The name \", style.fail(f\"'{name}'\"), \" contains", "characters for an index name\") click.echo() echo_line(\"Index names may contain numbers, 
lowercase letters,", "machine, it does not need to be run from inside any of the", "before allowing M-Notes to add this as an index conflicts = env.global_index.find_conflicts(env.cwd) if", "adding to the global directory.\") # Check for conflicts before allowing M-Notes to", "similar to the method which rsync uses) However, it's up to the file", "def delete(env: MnoteEnvironment, name: str): \"\"\" Delete an index from the global directory.", "index named '{name}'\", bold=True)) echo_line(style.warning(f\"which maps to the folder '{env.cwd}'\", bold=True)) click.echo() if", "* index mode\") if len(env.global_index.indices) == 0 and ctx.invoked_subcommand != \"create\": echo_line(\" *", "are about to remove the index named '{name}'\", bold=True)) echo_line(style.warning(f\"which maps to the", "arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end = time.time() echo_line() echo_line(style.success(f\"Operation completed in {end - start:0.1f}", "else: click.echo(style.fail(\"User rejected index creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env def create(env: MnoteEnvironment, name:", "env.global_index.indices: echo_line(style.fail(f\"There is no index named '{name}' to remove!\")) return # If we", "style.visible(f\"{len(env.global_index.indices)}\"), \" indices in the global directory\") if ctx.invoked_subcommand is None: # Update", "conflict in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for ID {id_}:\", bold=True)) for e in conflict.existing:", "ctx.invoked_subcommand is None: # Update the global index start_time = time.time() env.global_index.load_all() end_time", "folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User created index\")) env.global_index.index_directory[name]", "\", style.fail(f\"'{name}'\"), \" contains invalid characters for an index name\") click.echo() 
echo_line(\"Index names", "@pass_env def delete(env: MnoteEnvironment, name: str): \"\"\" Delete an index from the global", "the global directory\") if ctx.invoked_subcommand is None: # Update the global index start_time", "NoteIndex = env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\")", "the global directory. \"\"\" style = env.config.styles click.echo() if name not in env.global_index.indices:", "if conflicts: echo_line(style.fail(\"There are ID conflicts which would be created if this folder", "with the index name and the current date and time and saved in", "these values accurately, so this option uses the SHA1 checksum to rebuild the", "the file size and timestamps. \"\"\" style = env.config.styles start_time = time.time() env.global_index.load_all(True)", "conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for ID {id_}:\", bold=True)) for e in conflict.existing: echo_line(style.visible(f\" *", "there are \", style.warning(\"no indices\"), \" in the global directory\") echo_line(\" -> to", "saved in the current directory. This command can be run from anywhere on", "contained by other indexes\")) return # Check if this index would contain another", "in {end - start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env def reload(env: MnoteEnvironment): \"\"\" Rebuild all", "does not need to be run from inside any of the index folders.", "- start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current Indices in Global Directory:\", bold=True)) for index in", "return # If we got to this point we can create the index!", "echo_line(f\" * {index.name}: {index.path}\") return # Check if the name given is valid", "of them at once. 
\"\"\" style = env.config.styles click.echo() failed = False for", "bold=True)) index: NoteIndex = env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\") with", "and last modified timestamp to guess at whether the file has changed since", "name in names: echo_line() echo_line(click.style(\"Zipping index \", bold=True), style.visible(f\"'{name}'\", bold=True)) index: NoteIndex =", "with checksums, took {end_time - start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env def delete(env:", "click.echo(style.fail(\"User rejected index creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env def create(env: MnoteEnvironment, name: str):", "return # Check if the name given is valid if valid_chars_pattern.findall(name): echo_line(\"The name", "style = env.config.styles click.echo() if name not in env.global_index.indices: echo_line(style.fail(f\"There is no index", "time and saved in the current directory. This command can be run from", "= env.config.styles start_time = time.time() env.global_index.load_all(True) end_time = time.time() click.echo(style.success(f\"Updated all indices with", "you\" \"are adding to the global directory.\") return if name in env.global_index.indices: echo_line(\"The", "containing indexed notes.\"\"\" style = env.config.styles env.global_index.load_all() echo_line(\" * index mode\") if len(env.global_index.indices)", "\" \"indexes.\")) for index in contained: echo_line(f\" * {index.name}: {index.path}\") return # Check", "point we can create the index! click.echo() echo_line(style.warning(f\"You are about to create an", "the files, but slower than simply looking at the file size and timestamps.", "last read (this is similar to the method which rsync uses) However, it's", "directory. 
This command can be run from anywhere on the machine, it does", "{e.file_path}\")) for c in conflict.conflicting: echo_line(style.warning(f\" * In this directory: {c.file_path}\")) return #", "files, but slower than simply looking at the file size and timestamps. \"\"\"", "is.\")) for id_, conflict in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for ID {id_}:\", bold=True)) for", "\"\"\" Create a new index in the global directory with the specified name.", "bold=True)): click.echo(style.success(\"User created index\")) env.global_index.index_directory[name] = {\"path\": env.cwd} save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index", "uses) However, it's up to the file system to report these values accurately,", "looking at the file size and timestamps. \"\"\" style = env.config.styles start_time =", "change?\", bold=True)): click.echo(style.success(\"User created index\")) env.global_index.index_directory[name] = {\"path\": env.cwd} save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected", "blank in order to back up all of them at once. \"\"\" style", "directory.\") # Check for conflicts before allowing M-Notes to add this as an", "of an index named \" f\"'{env.index_of_cwd.name}'. Indexes cannot be contained by other indexes\"))", "change?\", bold=True)): click.echo(style.success(\"User deleted index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index creation\"))", "if this folder is merged into the global\" \"directory as it is.\")) for", "letters, and dashes only. Also consider that shorter \" \"names are faster to", "output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\") as zip_handle: with click.progressbar(index.notes.values()) as notes:", "following already-existing indices are subdirectories of the current working \" \"directory. 
You can't", "env.global_index.load_all() end_time = time.time() click.echo(style.success(f\" * updated all indices, took {end_time - start_time:0.2f}", "for c in conflict.conflicting: echo_line(style.warning(f\" * In this directory: {c.file_path}\")) return # If", "Already in global: {e.file_path}\")) for c in conflict.conflicting: echo_line(style.warning(f\" * In this directory:", "index(s) specified, so zipping all of them...\")) names = [i.name for i in", "of the files, but slower than simply looking at the file size and", "to rebuild with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1) @pass_env def zip_cmd(env: MnoteEnvironment, names:", "re import sys import time from typing import List from zipfile import ZipFile,", "archive!\")) failed = True if failed: return if not names: echo_line(style.visible(\"No index(s) specified,", "checksum to rebuild the indicies. It's faster than re-reading all of the files,", "folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User deleted index\")) del", "names: echo_line() echo_line(click.style(\"Zipping index \", bold=True), style.visible(f\"'{name}'\", bold=True)) index: NoteIndex = env.global_index.indices[name] now", "all indices using checksums. M-Notes by default will verify the integrity of its", "env.global_index.load_all() echo_line(\" * index mode\") if len(env.global_index.indices) == 0 and ctx.invoked_subcommand != \"create\":", "to be run from inside any of the index folders. 
You can specify", "if the name given is valid if valid_chars_pattern.findall(name): echo_line(\"The name \", style.fail(f\"'{name}'\"), \"", "echo_line(\" * there are \", style.warning(\"no indices\"), \" in the global directory\") echo_line(\"", "id_, conflict in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for ID {id_}:\", bold=True)) for e in", "a new index in the global directory with the specified name. \"\"\" style", "contain another index contained = env.indices_in_cwd if contained: echo_line(style.fail(\"The following already-existing indices are", "time.time() click.echo(style.success(f\" * updated all indices, took {end_time - start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current", "an index navigate to the folder containing notes you want to add\") echo_line(\"", "env.global_index.load_all(True) end_time = time.time() click.echo(style.success(f\"Updated all indices with checksums, took {end_time - start_time:0.2f}", "create an index here because indexes cannot be contained by other \" \"indexes.\"))", "= time.time() click.echo(style.success(f\" * updated all indices, took {end_time - start_time:0.2f} seconds\")) click.echo()", "be created if this folder is merged into the global\" \"directory as it", "directory.\") return if name in env.global_index.indices: echo_line(\"The name \", style.fail(f\"'{name}'\"), \" is already", "create an index navigate to the folder containing notes you want to add\")", "@main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1) @pass_env def zip_cmd(env: MnoteEnvironment, names: List[str]): \"\"\" Archive an", "click.echo() echo_line(style.warning(f\"You are about to remove the index named '{name}'\", bold=True)) echo_line(style.warning(f\"which maps", "echo_line(style.warning(f\"which will be located in the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this", "to rebuild the indicies. 
It's faster than re-reading all of the files, but", "index or multiple/all indices in zip files Creates archives of the markdown notes", "are ID conflicts which would be created if this folder is merged into", "for conflicts before allowing M-Notes to add this as an index conflicts =", "contain numbers, lowercase letters, and dashes only. Also consider that shorter \" \"names", "than re-reading all of the files, but slower than simply looking at the", "create the index! click.echo() echo_line(style.warning(f\"You are about to create an index named '{name}'\",", "only, no resources) of the indices by compressing them into zip files. The", "name \", style.fail(f\"'{name}'\"), \" is already used by another index.\") click.echo() echo_line(\"Index names", "click.core.Context): \"\"\" Manage M-Notes' global directory of indices. Indices represent folders containing indexed", "several indices, or leave the 'name' argument blank in order to back up", "else: echo_line(\" * there are \", style.visible(f\"{len(env.global_index.indices)}\"), \" indices in the global directory\")", "end = time.time() echo_line() echo_line(style.success(f\"Operation completed in {end - start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env", "but slower than simply looking at the file size and timestamps. \"\"\" style", "echo_line() echo_line(style.success(f\"Operation completed in {end - start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env def reload(env: MnoteEnvironment):", "in the global directory with the specified name. 
\"\"\" style = env.config.styles click.echo()", "indexed notes.\"\"\" style = env.config.styles env.global_index.load_all() echo_line(\" * index mode\") if len(env.global_index.indices) ==", "as it is.\")) for id_, conflict in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for ID {id_}:\",", "the global index start_time = time.time() env.global_index.load_all() end_time = time.time() click.echo(style.success(f\" * updated", "\" contains invalid characters for an index name\") click.echo() echo_line(\"Index names may contain", "valid_chars_pattern.findall(name): echo_line(\"The name \", style.fail(f\"'{name}'\"), \" contains invalid characters for an index name\")", "bold=True)): click.echo(style.success(\"User deleted index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index creation\")) @main.command(name=\"create\")", "\"directory as it is.\")) for id_, conflict in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for ID", "the global directory.\") # Check for conflicts before allowing M-Notes to add this", "integrity of its cached data by looking at the file size and last", "* updated all indices, took {end_time - start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current Indices in", "index named \" f\"'{env.index_of_cwd.name}'. Indexes cannot be contained by other indexes\")) return #", "now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\") as zip_handle: with", "if contained: echo_line(style.fail(\"The following already-existing indices are subdirectories of the current working \"", "click from mnotes.environment import MnoteEnvironment, pass_env, echo_line, save_global_index_data from mnotes.notes.index import NoteIndex from", "option uses the SHA1 checksum to rebuild the indicies. 
It's faster than re-reading", "@pass_env def zip_cmd(env: MnoteEnvironment, names: List[str]): \"\"\" Archive an index or multiple/all indices", "index navigate to the folder containing notes you want to add\") echo_line(\" ->", "NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env def main(env: MnoteEnvironment, ctx: click.core.Context):", "index contained = env.indices_in_cwd if contained: echo_line(style.fail(\"The following already-existing indices are subdirectories of", "index_name in names: if index_name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named", "not need to be run from inside any of the index folders. You", "ZIP_DEFLATED from datetime import datetime as DateTime import click from mnotes.environment import MnoteEnvironment,", "part of another index if env.index_of_cwd is not None: echo_line(style.fail(f\"The current working directory", "# Check if this folder is already part of another index if env.index_of_cwd", "it does not need to be run from inside any of the index", "create(env: MnoteEnvironment, name: str): \"\"\" Create a new index in the global directory", "not names: echo_line(style.visible(\"No index(s) specified, so zipping all of them...\")) names = [i.name", "will be named with the index name and the current date and time", "= time.time() env.global_index.load_all(True) end_time = time.time() click.echo(style.success(f\"Updated all indices with checksums, took {end_time", "already-existing indices are subdirectories of the current working \" \"directory. You can't create", "lowercase letters, and dashes only. Also consider that shorter \" \"names are faster", "simply looking at the file size and timestamps. 
\"\"\" style = env.config.styles start_time", "name and the current date and time and saved in the current directory.", "the index named '{name}'\", bold=True)) echo_line(style.warning(f\"which maps to the folder '{env.cwd}'\", bold=True)) click.echo()", "ID {id_}:\", bold=True)) for e in conflict.existing: echo_line(style.visible(f\" * Already in global: {e.file_path}\"))", "indices, or leave the 'name' argument blank in order to back up all", "if this index would contain another index contained = env.indices_in_cwd if contained: echo_line(style.fail(\"The", "for index in contained: echo_line(f\" * {index.name}: {index.path}\") return # Check if the", "the index folders. You can specify a single index by name, several indices,", "conflicts before allowing M-Notes to add this as an index conflicts = env.global_index.find_conflicts(env.cwd)", "current directory. This command can be run from anywhere on the machine, it", "already part of an index named \" f\"'{env.index_of_cwd.name}'. Indexes cannot be contained by", "nargs=-1) @pass_env def zip_cmd(env: MnoteEnvironment, names: List[str]): \"\"\" Archive an index or multiple/all", "conflicts = env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There are ID conflicts which would be created", "- start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env def reload(env: MnoteEnvironment): \"\"\" Rebuild all indices using", "named '{name}'\", bold=True)) echo_line(style.warning(f\"which will be located in the folder '{env.cwd}'\", bold=True)) click.echo()", "the current directory. This command can be run from anywhere on the machine,", "by other \" \"indexes.\")) for index in contained: echo_line(f\" * {index.name}: {index.path}\") return", "return if not names: echo_line(style.visible(\"No index(s) specified, so zipping all of them...\")) names", "def create(env: MnoteEnvironment, name: str): \"\"\" Create a new index in the global", "\" f\"'{env.index_of_cwd.name}'. 
Indexes cannot be contained by other indexes\")) return # Check if", "env.config.styles click.echo() failed = False for index_name in names: if index_name not in", "the index name as a nickname or an alias for the folder you\"", "can't create an index here because indexes cannot be contained by other \"", "\", style.fail(f\"'{name}'\"), \" is already used by another index.\") click.echo() echo_line(\"Index names may", "<name>' command\") sys.exit() else: echo_line(\" * there are \", style.visible(f\"{len(env.global_index.indices)}\"), \" indices in", "save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env def create(env: MnoteEnvironment,", "* {index.name}: {index.path}\") return # Check if the name given is valid if", "notes): {index.path}\") echo_line() echo_line(style.visible(\" (use 'mnote index reload' to rebuild with checksums)\")) @main.command(name=\"zip\")", "updated all indices, took {end_time - start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current Indices in Global", "the specified name. \"\"\" style = env.config.styles click.echo() # Check if this folder", "named '{name}'\", bold=True)) echo_line(style.warning(f\"which maps to the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply", "as DateTime import click from mnotes.environment import MnoteEnvironment, pass_env, echo_line, save_global_index_data from mnotes.notes.index", "names: echo_line(style.visible(\"No index(s) specified, so zipping all of them...\")) names = [i.name for", "str): \"\"\" Delete an index from the global directory. 
\"\"\" style = env.config.styles", "timestamp to guess at whether the file has changed since it was last", "an index named '{name}'\", bold=True)) echo_line(style.warning(f\"which will be located in the folder '{env.cwd}'\",", "@click.pass_context @pass_env def main(env: MnoteEnvironment, ctx: click.core.Context): \"\"\" Manage M-Notes' global directory of", "start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env def reload(env: MnoteEnvironment): \"\"\" Rebuild all indices using checksums.", "index creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env def create(env: MnoteEnvironment, name: str): \"\"\" Create", "global directory.\") # Check for conflicts before allowing M-Notes to add this as", "{end_time - start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current Indices in Global Directory:\", bold=True)) for index", "conflicts which would be created if this folder is merged into the global\"", "a single index by name, several indices, or leave the 'name' argument blank", "f\" ({len(index.notes)} notes): {index.path}\") echo_line() echo_line(style.visible(\" (use 'mnote index reload' to rebuild with", "date and time and saved in the current directory. This command can be", "dashes only. Also consider that shorter \" \"names are faster to type. Think", "start_time = time.time() env.global_index.load_all() end_time = time.time() click.echo(style.success(f\" * updated all indices, took", "name\") click.echo() echo_line(\"Index names may contain numbers, lowercase letters, and dashes only. 
Also", "style = env.config.styles start_time = time.time() env.global_index.load_all(True) end_time = time.time() click.echo(style.success(f\"Updated all indices", "for index_name in names: if index_name not in env.global_index.indices: echo_line(style.fail(f\"There is no index", "(text files only, no resources) of the indices by compressing them into zip", "MnoteEnvironment, name: str): \"\"\" Create a new index in the global directory with", "'name' argument blank in order to back up all of them at once.", "Delete an index from the global directory. \"\"\" style = env.config.styles click.echo() if", "\"indexes.\")) for index in contained: echo_line(f\" * {index.name}: {index.path}\") return # Check if", "ZipFile(output_name, \"w\") as zip_handle: with click.progressbar(index.notes.values()) as notes: for note in notes: note:", "checksums, took {end_time - start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env def delete(env: MnoteEnvironment,", "directory of indices. 
Indices represent folders containing indexed notes.\"\"\" style = env.config.styles env.global_index.load_all()", "if name in env.global_index.indices: echo_line(\"The name \", style.fail(f\"'{name}'\"), \" is already used by", "directory: {c.file_path}\")) return # If we got to this point we can create", "this change?\", bold=True)): click.echo(style.success(\"User deleted index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index", "= env.config.styles click.echo() failed = False for index_name in names: if index_name not", "= env.indices_in_cwd if contained: echo_line(style.fail(\"The following already-existing indices are subdirectories of the current", "from mnotes.notes.index import NoteIndex from mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True)", "to create an index named '{name}'\", bold=True)) echo_line(style.warning(f\"which will be located in the", "of them...\")) names = [i.name for i in env.global_index.indices.values()] start = time.time() for", "is already used by another index.\") click.echo() echo_line(\"Index names may contain numbers, lowercase", "into the global\" \"directory as it is.\")) for id_, conflict in conflicts.items(): click.echo()", "took {end_time - start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current Indices in Global Directory:\", bold=True)) for", "name given is valid if valid_chars_pattern.findall(name): echo_line(\"The name \", style.fail(f\"'{name}'\"), \" contains invalid", "echo_line() echo_line(style.visible(\" (use 'mnote index reload' to rebuild with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str,", "than simply looking at the file size and timestamps. 
\"\"\" style = env.config.styles", "None: echo_line(style.fail(f\"The current working directory is already part of an index named \"", "@click.argument(\"names\", type=str, nargs=-1) @pass_env def zip_cmd(env: MnoteEnvironment, names: List[str]): \"\"\" Archive an index", "= os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\") as zip_handle: with click.progressbar(index.notes.values()) as notes: for", "time from typing import List from zipfile import ZipFile, ZIP_DEFLATED from datetime import", "up all of them at once. \"\"\" style = env.config.styles click.echo() failed =", "contained = env.indices_in_cwd if contained: echo_line(style.fail(\"The following already-existing indices are subdirectories of the", "indices. Indices represent folders containing indexed notes.\"\"\" style = env.config.styles env.global_index.load_all() echo_line(\" *", "add\") echo_line(\" -> then use the 'mnote index create <name>' command\") sys.exit() else:", "for an index name\") click.echo() echo_line(\"Index names may contain numbers, lowercase letters, and", "the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User created index\"))", "start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env def delete(env: MnoteEnvironment, name: str): \"\"\" Delete", "M-Notes by default will verify the integrity of its cached data by looking", "it's up to the file system to report these values accurately, so this", "= [i.name for i in env.global_index.indices.values()] start = time.time() for name in names:", "re-reading all of the files, but slower than simply looking at the file", "index would contain another index contained = env.indices_in_cwd if contained: echo_line(style.fail(\"The following already-existing", "if valid_chars_pattern.findall(name): echo_line(\"The name \", style.fail(f\"'{name}'\"), \" contains invalid 
characters for an index", "0 and ctx.invoked_subcommand != \"create\": echo_line(\" * there are \", style.warning(\"no indices\"), \"", "def main(env: MnoteEnvironment, ctx: click.core.Context): \"\"\" Manage M-Notes' global directory of indices. Indices", "default will verify the integrity of its cached data by looking at the", "file size and last modified timestamp to guess at whether the file has", "import click from mnotes.environment import MnoteEnvironment, pass_env, echo_line, save_global_index_data from mnotes.notes.index import NoteIndex", "to remove!\")) return # If we got to this point we can create", "type=str) @pass_env def create(env: MnoteEnvironment, name: str): \"\"\" Create a new index in", "order to back up all of them at once. \"\"\" style = env.config.styles", "Check for conflicts before allowing M-Notes to add this as an index conflicts", "Directory:\", bold=True)) for index in env.global_index.indices.values(): echo_line(\" * \", style.visible(index.name), f\" ({len(index.notes)} notes):", "If we got to this point we can create the index! click.echo() echo_line(style.warning(f\"You", "up to the file system to report these values accurately, so this option", "to add this as an index conflicts = env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There are", "MnoteEnvironment, ctx: click.core.Context): \"\"\" Manage M-Notes' global directory of indices. Indices represent folders", "\" in the global directory\") echo_line(\" -> to create an index navigate to", "click.echo() echo_line(click.style(\"Current Indices in Global Directory:\", bold=True)) for index in env.global_index.indices.values(): echo_line(\" *", "\"\"\" Delete an index from the global directory. 
\"\"\" style = env.config.styles click.echo()", "as notes: for note in notes: note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end", "if ctx.invoked_subcommand is None: # Update the global index start_time = time.time() env.global_index.load_all()", "and time and saved in the current directory. This command can be run", "system to report these values accurately, so this option uses the SHA1 checksum", "Indices represent folders containing indexed notes.\"\"\" style = env.config.styles env.global_index.load_all() echo_line(\" * index", "click.echo() echo_line(style.warning(f\"Conflict for ID {id_}:\", bold=True)) for e in conflict.existing: echo_line(style.visible(f\" * Already", "seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env def delete(env: MnoteEnvironment, name: str): \"\"\" Delete an", "env.indices_in_cwd if contained: echo_line(style.fail(\"The following already-existing indices are subdirectories of the current working", "contained: echo_line(f\" * {index.name}: {index.path}\") return # Check if the name given is", "the current date and time and saved in the current directory. 
This command", "click.echo(style.success(f\" * updated all indices, took {end_time - start_time:0.2f} seconds\")) click.echo() echo_line(click.style(\"Current Indices", "read (this is similar to the method which rsync uses) However, it's up", "List from zipfile import ZipFile, ZIP_DEFLATED from datetime import datetime as DateTime import", "valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env def main(env: MnoteEnvironment, ctx: click.core.Context): \"\"\"", "{index.path}\") return # Check if the name given is valid if valid_chars_pattern.findall(name): echo_line(\"The", "to the global directory.\") # Check for conflicts before allowing M-Notes to add", "which would be created if this folder is merged into the global\" \"directory", "indicies. It's faster than re-reading all of the files, but slower than simply", "index! click.echo() echo_line(style.warning(f\"You are about to remove the index named '{name}'\", bold=True)) echo_line(style.warning(f\"which", "[i.name for i in env.global_index.indices.values()] start = time.time() for name in names: echo_line()", "mode\") if len(env.global_index.indices) == 0 and ctx.invoked_subcommand != \"create\": echo_line(\" * there are", "name \", style.fail(f\"'{name}'\"), \" contains invalid characters for an index name\") click.echo() echo_line(\"Index", "in the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User created", "global directory\") echo_line(\" -> to create an index navigate to the folder containing", "note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end = time.time() echo_line() echo_line(style.success(f\"Operation completed in", "style.fail(f\"'{name}'\"), \" is already used by another index.\") click.echo() echo_line(\"Index names may contain", "in 
conflict.existing: echo_line(style.visible(f\" * Already in global: {e.file_path}\")) for c in conflict.conflicting: echo_line(style.warning(f\"", "= time.time() echo_line() echo_line(style.success(f\"Operation completed in {end - start:0.1f} seconds\")) @main.command(name=\"reload\") @pass_env def", "indices in the global directory\") if ctx.invoked_subcommand is None: # Update the global", "it was last read (this is similar to the method which rsync uses)", "you\" \"are adding to the global directory.\") # Check for conflicts before allowing", "env.global_index.indices: echo_line(style.fail(f\"There is no index named '{index_name}' to archive!\")) failed = True if", "by looking at the file size and last modified timestamp to guess at", "'{name}'\", bold=True)) echo_line(style.warning(f\"which maps to the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this", "archives of the markdown notes (text files only, no resources) of the indices", "consider that shorter \" \"names are faster to type. Think of the index", "This command can be run from anywhere on the machine, it does not", "and ctx.invoked_subcommand != \"create\": echo_line(\" * there are \", style.warning(\"no indices\"), \" in", "use the 'mnote index create <name>' command\") sys.exit() else: echo_line(\" * there are", "anywhere on the machine, it does not need to be run from inside", "== 0 and ctx.invoked_subcommand != \"create\": echo_line(\" * there are \", style.warning(\"no indices\"),", "\"create\": echo_line(\" * there are \", style.warning(\"no indices\"), \" in the global directory\")", "is None: # Update the global index start_time = time.time() env.global_index.load_all() end_time =", "zip files. 
The files will be named with the index name and the", "in env.global_index.indices: echo_line(\"The name \", style.fail(f\"'{name}'\"), \" is already used by another index.\")", "files Creates archives of the markdown notes (text files only, no resources) of", "subdirectories of the current working \" \"directory. You can't create an index here", "of the current working \" \"directory. You can't create an index here because", "an index from the global directory. \"\"\" style = env.config.styles click.echo() if name", "if this folder is already part of another index if env.index_of_cwd is not", "last modified timestamp to guess at whether the file has changed since it", "got to this point we can create the index! click.echo() echo_line(style.warning(f\"You are about", "the global directory\") echo_line(\" -> to create an index navigate to the folder", "global directory.\") return if name in env.global_index.indices: echo_line(\"The name \", style.fail(f\"'{name}'\"), \" is", "os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\") as zip_handle: with click.progressbar(index.notes.values()) as notes: for note", "\"\"\" style = env.config.styles click.echo() # Check if this folder is already part", "the 'mnote index create <name>' command\") sys.exit() else: echo_line(\" * there are \",", "folder is already part of another index if env.index_of_cwd is not None: echo_line(style.fail(f\"The", "adding to the global directory.\") return if name in env.global_index.indices: echo_line(\"The name \",", "zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end = time.time() echo_line() echo_line(style.success(f\"Operation completed in {end -", "with the specified name. 
\"\"\" style = env.config.styles click.echo() # Check if this", "an index conflicts = env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There are ID conflicts which would", "@click.argument(\"name\", type=str) @pass_env def delete(env: MnoteEnvironment, name: str): \"\"\" Delete an index from", "datetime import datetime as DateTime import click from mnotes.environment import MnoteEnvironment, pass_env, echo_line,", "index if env.index_of_cwd is not None: echo_line(style.fail(f\"The current working directory is already part", "add this as an index conflicts = env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There are ID", "allowing M-Notes to add this as an index conflicts = env.global_index.find_conflicts(env.cwd) if conflicts:", "index create <name>' command\") sys.exit() else: echo_line(\" * there are \", style.visible(f\"{len(env.global_index.indices)}\"), \"", "point we can create the index! click.echo() echo_line(style.warning(f\"You are about to remove the", "global: {e.file_path}\")) for c in conflict.conflicting: echo_line(style.warning(f\" * In this directory: {c.file_path}\")) return", "remove!\")) return # If we got to this point we can create the", "indices using checksums. M-Notes by default will verify the integrity of its cached", "changed since it was last read (this is similar to the method which", "click.echo() failed = False for index_name in names: if index_name not in env.global_index.indices:", "to add\") echo_line(\" -> then use the 'mnote index create <name>' command\") sys.exit()", "creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env def create(env: MnoteEnvironment, name: str): \"\"\" Create a", "indices by compressing them into zip files. The files will be named with", "-> to create an index navigate to the folder containing notes you want", "and dashes only. 
Also consider that shorter \" \"names are faster to type.", "reload(env: MnoteEnvironment): \"\"\" Rebuild all indices using checksums. M-Notes by default will verify", "notes you want to add\") echo_line(\" -> then use the 'mnote index create", "@pass_env def reload(env: MnoteEnvironment): \"\"\" Rebuild all indices using checksums. M-Notes by default", "'mnote index create <name>' command\") sys.exit() else: echo_line(\" * there are \", style.visible(f\"{len(env.global_index.indices)}\"),", "start_time = time.time() env.global_index.load_all(True) end_time = time.time() click.echo(style.success(f\"Updated all indices with checksums, took", "in conflict.conflicting: echo_line(style.warning(f\" * In this directory: {c.file_path}\")) return # If we got", "in the global directory\") if ctx.invoked_subcommand is None: # Update the global index", "f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\") as zip_handle: with click.progressbar(index.notes.values()) as notes: for note in", "index.\") click.echo() echo_line(\"Index names may contain numbers, lowercase letters, and dashes only. Also", "to type. Think of the index name as a nickname or an alias", "SHA1 checksum to rebuild the indicies. It's faster than re-reading all of the", "markdown notes (text files only, no resources) of the indices by compressing them", "the index name and the current date and time and saved in the", "# Check if the name given is valid if valid_chars_pattern.findall(name): echo_line(\"The name \",", "navigate to the folder containing notes you want to add\") echo_line(\" -> then", "echo_line(style.fail(f\"The current working directory is already part of an index named \" f\"'{env.index_of_cwd.name}'.", "the current working \" \"directory. 
You can't create an index here because indexes", "echo_line(click.style(\"Current Indices in Global Directory:\", bold=True)) for index in env.global_index.indices.values(): echo_line(\" * \",", "import NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env def main(env: MnoteEnvironment, ctx:", "current working \" \"directory. You can't create an index here because indexes cannot", "It's faster than re-reading all of the files, but slower than simply looking", "type. Think of the index name as a nickname or an alias for", "note in notes: note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end = time.time() echo_line()", "of its cached data by looking at the file size and last modified", "numbers, lowercase letters, and dashes only. Also consider that shorter \" \"names are", "single index by name, several indices, or leave the 'name' argument blank in", "not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{name}' to remove!\")) return #", "# Check for conflicts before allowing M-Notes to add this as an index", "'{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User created index\")) env.global_index.index_directory[name] =", "= env.config.styles click.echo() if name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named", "index operations \"\"\" import os import re import sys import time from typing", "no index named '{index_name}' to archive!\")) failed = True if failed: return if", "accurately, so this option uses the SHA1 checksum to rebuild the indicies. 
It's", "= DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\") as zip_handle: with click.progressbar(index.notes.values())", "\"directory. You can't create an index here because indexes cannot be contained by", "run from inside any of the index folders. You can specify a single", "in order to back up all of them at once. \"\"\" style =", "compress_type=ZIP_DEFLATED) end = time.time() echo_line() echo_line(style.success(f\"Operation completed in {end - start:0.1f} seconds\")) @main.command(name=\"reload\")", "working \" \"directory. You can't create an index here because indexes cannot be", "for id_, conflict in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for ID {id_}:\", bold=True)) for e", "because indexes cannot be contained by other \" \"indexes.\")) for index in contained:", "the integrity of its cached data by looking at the file size and", "maps to the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User", "only. Also consider that shorter \" \"names are faster to type. 
Think of", "notes (text files only, no resources) of the indices by compressing them into", "\", style.visible(index.name), f\" ({len(index.notes)} notes): {index.path}\") echo_line() echo_line(style.visible(\" (use 'mnote index reload' to", "\"\"\" style = env.config.styles click.echo() failed = False for index_name in names: if", "conflicts: echo_line(style.fail(\"There are ID conflicts which would be created if this folder is", "index named '{index_name}' to archive!\")) failed = True if failed: return if not", "zip_handle: with click.progressbar(index.notes.values()) as notes: for note in notes: note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path,", "{id_}:\", bold=True)) for e in conflict.existing: echo_line(style.visible(f\" * Already in global: {e.file_path}\")) for", "global directory of indices. Indices represent folders containing indexed notes.\"\"\" style = env.config.styles", "index name and the current date and time and saved in the current", "the index! 
click.echo() echo_line(style.warning(f\"You are about to create an index named '{name}'\", bold=True))", "notes.\"\"\" style = env.config.styles env.global_index.load_all() echo_line(\" * index mode\") if len(env.global_index.indices) == 0", "import os import re import sys import time from typing import List from", "click.echo() if name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{name}' to", "containing notes you want to add\") echo_line(\" -> then use the 'mnote index", "return if name in env.global_index.indices: echo_line(\"The name \", style.fail(f\"'{name}'\"), \" is already used", "by other indexes\")) return # Check if this index would contain another index", "= time.time() env.global_index.load_all() end_time = time.time() click.echo(style.success(f\" * updated all indices, took {end_time", "method which rsync uses) However, it's up to the file system to report", "verify the integrity of its cached data by looking at the file size", "names: if index_name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{index_name}' to", "Create a new index in the global directory with the specified name. \"\"\"", "an index named \" f\"'{env.index_of_cwd.name}'. 
Indexes cannot be contained by other indexes\")) return", "click.progressbar(index.notes.values()) as notes: for note in notes: note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED)", "str): \"\"\" Create a new index in the global directory with the specified", "specified, so zipping all of them...\")) names = [i.name for i in env.global_index.indices.values()]", "no index named '{name}' to remove!\")) return # If we got to this", "echo_line(\"The name \", style.fail(f\"'{name}'\"), \" contains invalid characters for an index name\") click.echo()", "style.visible(f\"'{name}'\", bold=True)) index: NoteIndex = env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\")", "is not None: echo_line(style.fail(f\"The current working directory is already part of an index", "bold=True)) echo_line(style.warning(f\"which maps to the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\",", "the machine, it does not need to be run from inside any of", "specify a single index by name, several indices, or leave the 'name' argument", "about to create an index named '{name}'\", bold=True)) echo_line(style.warning(f\"which will be located in", "need to be run from inside any of the index folders. You can", "once. 
\"\"\" style = env.config.styles click.echo() failed = False for index_name in names:", "located in the folder '{env.cwd}'\", bold=True)) click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User", "names: List[str]): \"\"\" Archive an index or multiple/all indices in zip files Creates", "names = [i.name for i in env.global_index.indices.values()] start = time.time() for name in", "echo_line(click.style(\"Zipping index \", bold=True), style.visible(f\"'{name}'\", bold=True)) index: NoteIndex = env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")", "other \" \"indexes.\")) for index in contained: echo_line(f\" * {index.name}: {index.path}\") return #", "in zip files Creates archives of the markdown notes (text files only, no", "nickname or an alias for the folder you\" \"are adding to the global", "echo_line(style.warning(f\" * In this directory: {c.file_path}\")) return # If we got to this", "import time from typing import List from zipfile import ZipFile, ZIP_DEFLATED from datetime", "directory\") echo_line(\" -> to create an index navigate to the folder containing notes", "def zip_cmd(env: MnoteEnvironment, names: List[str]): \"\"\" Archive an index or multiple/all indices in", "we got to this point we can create the index! 
click.echo() echo_line(style.warning(f\"You are", "not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{index_name}' to archive!\")) failed =", "with ZipFile(output_name, \"w\") as zip_handle: with click.progressbar(index.notes.values()) as notes: for note in notes:", "style = env.config.styles click.echo() # Check if this folder is already part of", "or multiple/all indices in zip files Creates archives of the markdown notes (text", "is merged into the global\" \"directory as it is.\")) for id_, conflict in", "named with the index name and the current date and time and saved", "rsync uses) However, it's up to the file system to report these values", "reload' to rebuild with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1) @pass_env def zip_cmd(env: MnoteEnvironment,", "slower than simply looking at the file size and timestamps. \"\"\" style =", "echo_line(\"The name \", style.fail(f\"'{name}'\"), \" is already used by another index.\") click.echo() echo_line(\"Index", "\"\"\" Manage M-Notes' global directory of indices. 
Indices represent folders containing indexed notes.\"\"\"", "from zipfile import ZipFile, ZIP_DEFLATED from datetime import datetime as DateTime import click", "env.global_index.indices.values()] start = time.time() for name in names: echo_line() echo_line(click.style(\"Zipping index \", bold=True),", "for note in notes: note: NoteInfo zip_handle.write(note.file_path, arcname=os.path.relpath(note.file_path, start=index.path), compress_type=ZIP_DEFLATED) end = time.time()", "time.time() env.global_index.load_all() end_time = time.time() click.echo(style.success(f\" * updated all indices, took {end_time -", "\", style.warning(\"no indices\"), \" in the global directory\") echo_line(\" -> to create an", "so zipping all of them...\")) names = [i.name for i in env.global_index.indices.values()] start", "the folder you\" \"are adding to the global directory.\") # Check for conflicts", "env.global_index.indices.values(): echo_line(\" * \", style.visible(index.name), f\" ({len(index.notes)} notes): {index.path}\") echo_line() echo_line(style.visible(\" (use 'mnote", "in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for ID {id_}:\", bold=True)) for e in conflict.existing: echo_line(style.visible(f\"", "files only, no resources) of the indices by compressing them into zip files.", "@pass_env def create(env: MnoteEnvironment, name: str): \"\"\" Create a new index in the", "then use the 'mnote index create <name>' command\") sys.exit() else: echo_line(\" * there", "in Global Directory:\", bold=True)) for index in env.global_index.indices.values(): echo_line(\" * \", style.visible(index.name), f\"", "can create the index! click.echo() echo_line(style.warning(f\"You are about to remove the index named", "create <name>' command\") sys.exit() else: echo_line(\" * there are \", style.visible(f\"{len(env.global_index.indices)}\"), \" indices", "and the current date and time and saved in the current directory. 
This", "whether the file has changed since it was last read (this is similar", "echo_line, save_global_index_data from mnotes.notes.index import NoteIndex from mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\")", "to archive!\")) failed = True if failed: return if not names: echo_line(style.visible(\"No index(s)", "@pass_env def main(env: MnoteEnvironment, ctx: click.core.Context): \"\"\" Manage M-Notes' global directory of indices.", "the SHA1 checksum to rebuild the indicies. It's faster than re-reading all of", "click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User deleted index\")) del env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected", "are about to create an index named '{name}'\", bold=True)) echo_line(style.warning(f\"which will be located", "invoke_without_command=True) @click.pass_context @pass_env def main(env: MnoteEnvironment, ctx: click.core.Context): \"\"\" Manage M-Notes' global directory", "* Already in global: {e.file_path}\")) for c in conflict.conflicting: echo_line(style.warning(f\" * In this", "would be created if this folder is merged into the global\" \"directory as", "so this option uses the SHA1 checksum to rebuild the indicies. 
It's faster", "in names: echo_line() echo_line(click.style(\"Zipping index \", bold=True), style.visible(f\"'{name}'\", bold=True)) index: NoteIndex = env.global_index.indices[name]", "represent folders containing indexed notes.\"\"\" style = env.config.styles env.global_index.load_all() echo_line(\" * index mode\")", "all indices with checksums, took {end_time - start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env", "the markdown notes (text files only, no resources) of the indices by compressing", "bold=True)) for index in env.global_index.indices.values(): echo_line(\" * \", style.visible(index.name), f\" ({len(index.notes)} notes): {index.path}\")", "will verify the integrity of its cached data by looking at the file", "command can be run from anywhere on the machine, it does not need", "indexes\")) return # Check if this index would contain another index contained =", "if name not in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{name}' to remove!\"))", "Check if this folder is already part of another index if env.index_of_cwd is", "index by name, several indices, or leave the 'name' argument blank in order", "from typing import List from zipfile import ZipFile, ZIP_DEFLATED from datetime import datetime", "= env.config.styles click.echo() # Check if this folder is already part of another", "True if failed: return if not names: echo_line(style.visible(\"No index(s) specified, so zipping all", "import MnoteEnvironment, pass_env, echo_line, save_global_index_data from mnotes.notes.index import NoteIndex from mnotes.notes.markdown_notes import NoteInfo", "\"\"\" Rebuild all indices using checksums. M-Notes by default will verify the integrity", "them into zip files. The files will be named with the index name", "f\"'{env.index_of_cwd.name}'. 
Indexes cannot be contained by other indexes\")) return # Check if this", "in the global directory\") echo_line(\" -> to create an index navigate to the", "index start_time = time.time() env.global_index.load_all() end_time = time.time() click.echo(style.success(f\" * updated all indices,", "are \", style.warning(\"no indices\"), \" in the global directory\") echo_line(\" -> to create", "be run from anywhere on the machine, it does not need to be", "@main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env def delete(env: MnoteEnvironment, name: str): \"\"\" Delete an index", "bold=True)) for e in conflict.existing: echo_line(style.visible(f\" * Already in global: {e.file_path}\")) for c", "from mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env def main(env:", "using checksums. M-Notes by default will verify the integrity of its cached data", "that shorter \" \"names are faster to type. Think of the index name", "from the global directory. 
\"\"\" style = env.config.styles click.echo() if name not in", "@click.argument(\"name\", type=str) @pass_env def create(env: MnoteEnvironment, name: str): \"\"\" Create a new index", "Global Directory:\", bold=True)) for index in env.global_index.indices.values(): echo_line(\" * \", style.visible(index.name), f\" ({len(index.notes)}", "type=str) @pass_env def delete(env: MnoteEnvironment, name: str): \"\"\" Delete an index from the", "from mnotes.environment import MnoteEnvironment, pass_env, echo_line, save_global_index_data from mnotes.notes.index import NoteIndex from mnotes.notes.markdown_notes", "echo_line(\" * there are \", style.visible(f\"{len(env.global_index.indices)}\"), \" indices in the global directory\") if", "in env.global_index.indices.values()] start = time.time() for name in names: echo_line() echo_line(click.style(\"Zipping index \",", "del env.global_index.index_directory[name] save_global_index_data(env.global_index) else: click.echo(style.fail(\"User rejected index creation\")) @main.command(name=\"create\") @click.argument(\"name\", type=str) @pass_env def", "ID conflicts which would be created if this folder is merged into the", "save_global_index_data from mnotes.notes.index import NoteIndex from mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\",", "MnoteEnvironment, names: List[str]): \"\"\" Archive an index or multiple/all indices in zip files", "folders containing indexed notes.\"\"\" style = env.config.styles env.global_index.load_all() echo_line(\" * index mode\") if", "Rebuild all indices using checksums. M-Notes by default will verify the integrity of", "remove the index named '{name}'\", bold=True)) echo_line(style.warning(f\"which maps to the folder '{env.cwd}'\", bold=True))", "index in the global directory with the specified name. 
\"\"\" style = env.config.styles", "named '{index_name}' to archive!\")) failed = True if failed: return if not names:", "current date and time and saved in the current directory. This command can", "the index! click.echo() echo_line(style.warning(f\"You are about to remove the index named '{name}'\", bold=True))", "NoteIndex from mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern = re.compile(r\"[^a-z0-9\\-]\") @click.group(name=\"index\", invoke_without_command=True) @click.pass_context @pass_env def", "\" \"directory. You can't create an index here because indexes cannot be contained", "sys import time from typing import List from zipfile import ZipFile, ZIP_DEFLATED from", "click.echo() echo_line(\"Index names may contain numbers, lowercase letters, and dashes only. Also consider", "resources) of the indices by compressing them into zip files. The files will", "name. \"\"\" style = env.config.styles click.echo() # Check if this folder is already", "any of the index folders. You can specify a single index by name,", "new index in the global directory with the specified name. \"\"\" style =", "this folder is already part of another index if env.index_of_cwd is not None:", "indices are subdirectories of the current working \" \"directory. You can't create an", "Think of the index name as a nickname or an alias for the", "main(env: MnoteEnvironment, ctx: click.core.Context): \"\"\" Manage M-Notes' global directory of indices. Indices represent", "report these values accurately, so this option uses the SHA1 checksum to rebuild", "bold=True), style.visible(f\"'{name}'\", bold=True)) index: NoteIndex = env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd,", "'mnote index reload' to rebuild with checksums)\")) @main.command(name=\"zip\") @click.argument(\"names\", type=str, nargs=-1) @pass_env def", "size and timestamps. 
\"\"\" style = env.config.styles start_time = time.time() env.global_index.load_all(True) end_time =", "given is valid if valid_chars_pattern.findall(name): echo_line(\"The name \", style.fail(f\"'{name}'\"), \" contains invalid characters", "e in conflict.existing: echo_line(style.visible(f\" * Already in global: {e.file_path}\")) for c in conflict.conflicting:", "run from anywhere on the machine, it does not need to be run", "= env.global_index.find_conflicts(env.cwd) if conflicts: echo_line(style.fail(\"There are ID conflicts which would be created if", "= env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name, \"w\") as", "create an index named '{name}'\", bold=True)) echo_line(style.warning(f\"which will be located in the folder", "Manage M-Notes' global directory of indices. Indices represent folders containing indexed notes.\"\"\" style", "\", style.visible(f\"{len(env.global_index.indices)}\"), \" indices in the global directory\") if ctx.invoked_subcommand is None: #", "- start_time:0.2f} seconds\")) @main.command(name=\"delete\") @click.argument(\"name\", type=str) @pass_env def delete(env: MnoteEnvironment, name: str): \"\"\"", "index: NoteIndex = env.global_index.indices[name] now = DateTime.now().strftime(\"%Y-%m-%d-%H-%M-%S\") output_name = os.path.join(env.cwd, f\"{name}-{now}.zip\") with ZipFile(output_name,", "False for index_name in names: if index_name not in env.global_index.indices: echo_line(style.fail(f\"There is no", "\"\"\" style = env.config.styles click.echo() if name not in env.global_index.indices: echo_line(style.fail(f\"There is no", "it is.\")) for id_, conflict in conflicts.items(): click.echo() echo_line(style.warning(f\"Conflict for ID {id_}:\", bold=True))", "echo_line(style.warning(f\"Conflict for ID {id_}:\", bold=True)) for e in conflict.existing: echo_line(style.visible(f\" * Already in", "are subdirectories of the 
current working \" \"directory. You can't create an index", "which rsync uses) However, it's up to the file system to report these", "click.echo() if click.confirm(click.style(f\"Apply this change?\", bold=True)): click.echo(style.success(\"User created index\")) env.global_index.index_directory[name] = {\"path\": env.cwd}", "style.warning(\"no indices\"), \" in the global directory\") echo_line(\" -> to create an index", "MnoteEnvironment, pass_env, echo_line, save_global_index_data from mnotes.notes.index import NoteIndex from mnotes.notes.markdown_notes import NoteInfo valid_chars_pattern", "\"w\") as zip_handle: with click.progressbar(index.notes.values()) as notes: for note in notes: note: NoteInfo", "about to remove the index named '{name}'\", bold=True)) echo_line(style.warning(f\"which maps to the folder", "zipfile import ZipFile, ZIP_DEFLATED from datetime import datetime as DateTime import click from", "in env.global_index.indices: echo_line(style.fail(f\"There is no index named '{name}' to remove!\")) return # If", "of the index name as a nickname or an alias for the folder", "import datetime as DateTime import click from mnotes.environment import MnoteEnvironment, pass_env, echo_line, save_global_index_data" ]
[ "= (map != 0.).float() intersection = gt_fg_mask * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1),", "for i, (imgs, maps) in enumerate(self.loader): w_featmap = imgs.shape[-2] // patch_size h_featmap =", "= np.unique(map) objects = np.delete(objects, [0, -1]) for o in objects: masko =", "bs, w_featmap, h_featmap, patch_size, maps) jacs_all_heads += self.evaluate_best_head(attentions, bs, w_featmap, h_featmap, patch_size, maps)", "bs, w_featmap, h_featmap, patch_size, maps) jacs_merged_attn /= len(self.dataset) jacs_all_heads /= len(self.dataset) print(f\"Merged Jaccard", "VOCDataset from torch.utils.data import DataLoader from torchvision.transforms import Compose, Resize, ToTensor, Normalize, GaussianBlur", "idx = torch.sort(attentions) val /= torch.sum(val, dim=-1, keepdim=True) cumval = torch.cumsum(val, dim=-1) th_attn", "= (masko + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jaco =", "0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jaco = intersection / union jac +=", "val /= torch.sum(val, dim=-1, keepdim=True) cumval = torch.cumsum(val, dim=-1) th_attn = cumval >", "1, w_featmap, h_featmap))\\ .reshape(bs, nh, -1) # we keep only a certain percentage", "blurring attentions = GaussianBlur(7, sigma=(.6))(attentions.reshape(bs * nh, 1, w_featmap, h_featmap))\\ .reshape(bs, nh, -1)", ".reshape(bs, nh, -1) # we keep only a certain percentage of the mass", "dataloader pvoc image_transforms = Compose([Resize((train_input_height, train_input_height)), ToTensor(), Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])", "20 + \"Evaluating attention maps on VOC2012 with threshold: \" + str(self.threshold) +", "with threshold: \" + str(self.threshold) + \"#\" * 20) jacs_merged_attn = 0 jacs_all_heads", "is present use teacher attention as it is also used during training if", "0.406], std=[0.229, 0.224, 0.225])]) target_transforms = Compose([Resize((train_input_height, train_input_height), 
interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root,", "pl_module.model model.eval() for i, (imgs, maps) in enumerate(self.loader): w_featmap = imgs.shape[-2] // patch_size", "// patch_size h_featmap = imgs.shape[-1] // patch_size with torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs", "torch.Tensor: jacs = 0 nh = attentions.shape[1] # number of heads # we", "# number of heads # we keep only a certain percentage of the", "head in range(nh): th_attn[:, head] = torch.gather(th_attn[:, head], dim=1, index=idx2[:, head]) th_attn =", "objects: masko = map == o intersection = masko * th_attn[k] intersection =", "= attentions.shape[1] # number of heads # we keep only a certain percentage", "= torch.gather(th_attn[:, head], dim=1, index=idx2[:, head]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() #", "num_workers: int, threshold: float = 0.6): # Setup transforms and dataloader pvoc image_transforms", "j, th_att in enumerate(th_attn): labelled = label(th_att.cpu().numpy()) for k in range(1, np.max(labelled) +", "print(\"\\n\" + \"#\" * 20 + \"Evaluating attention maps on VOC2012 with threshold:", "\"#\" * 20) jacs_merged_attn = 0 jacs_all_heads = 0 # If teacher is", "pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap:", "0.224, 0.225])]) target_transforms = Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\",", "torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size: int, maps: torch.Tensor) -> torch.Tensor:", "import InterpolationMode from skimage.measure import label class 
EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root: str, train_input_height:", "maps) jacs_all_heads += self.evaluate_best_head(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_merged_attn /= len(self.dataset) jacs_all_heads", "'teacher'): patch_size = pl_module.teacher.patch_size model = pl_module.teacher else: patch_size = pl_module.model.patch_size model =", "image_transforms = Compose([Resize((train_input_height, train_input_height)), ToTensor(), Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])]) target_transforms =", "torch import torch.nn as nn import pytorch_lightning as pl from data.VOCdevkit.vocdata import VOCDataset", "for each image for k, map in enumerate(maps): jac = 0 objects =", "in range(attentions.size(1))) nh = 1 # number of heads is one as we", "0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jacs += intersection / union return jacs", "std=[0.229, 0.224, 0.225])]) target_transforms = Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"),", "w_featmap: int, h_featmap: int, patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs = 0", "heads is one as we merged all heads # Gaussian blurring attentions =", "patch_size = pl_module.teacher.patch_size model = pl_module.teacher else: patch_size = pl_module.model.patch_size model = pl_module.model", "int, maps: torch.Tensor) -> torch.Tensor: jacs = 0 # Average attentions attentions =", "also used during training if hasattr(pl_module, 'teacher'): patch_size = pl_module.teacher.patch_size model = pl_module.teacher", "jacs_all_heads += self.evaluate_best_head(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_merged_attn /= len(self.dataset) jacs_all_heads /=", "we merged all heads # Gaussian blurring attentions = GaussianBlur(7, sigma=(.6))(attentions.reshape(bs * nh,", "+ \"Evaluating attention maps on VOC2012 
with threshold: \" + str(self.threshold) + \"#\"", "Jaccard on VOC12: {jacs_merged_attn.item()}\") print(f\"All heads Jaccard on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc',", "percentage of the mass val, idx = torch.sort(attentions) val /= torch.sum(val, dim=-1, keepdim=True)", "if np.sum(mask) <= 2: th_attn[j, 0][mask] = 0 # interpolate th_attn = nn.functional.interpolate(th_attn,", "training if hasattr(pl_module, 'teacher'): patch_size = pl_module.teacher.patch_size model = pl_module.teacher else: patch_size =", "DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold = threshold def on_validation_start(self, trainer: pl.Trainer,", "str, train_input_height: int, attn_batch_size: int, num_workers: int, threshold: float = 0.6): # Setup", "enumerate(self.loader): w_featmap = imgs.shape[-2] // patch_size h_featmap = imgs.shape[-1] // patch_size with torch.no_grad():", "gt_fg_mask = (map != 0.).float() intersection = gt_fg_mask * th_attn[k] intersection = torch.sum(torch.sum(intersection,", "k, map in enumerate(maps): gt_fg_mask = (map != 0.).float() intersection = gt_fg_mask *", "patch_size = pl_module.model.patch_size model = pl_module.model model.eval() for i, (imgs, maps) in enumerate(self.loader):", "th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU for each image for k,", "-> torch.Tensor: jacs = 0 # Average attentions attentions = sum(attentions[:, i] *", "__init__(self, voc_root: str, train_input_height: int, attn_batch_size: int, num_workers: int, threshold: float = 0.6):", "index=idx2[:, head]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # interpolate th_attn = nn.functional.interpolate(th_attn,", "protocols: merged attention and best head jacs_merged_attn += 
self.evaluate_merged_attentions(attentions, bs, w_featmap, h_featmap, patch_size,", "mode=\"nearest\").cpu().numpy() # Calculate IoU for each image for k, map in enumerate(maps): gt_fg_mask", "# Setup transforms and dataloader pvoc image_transforms = Compose([Resize((train_input_height, train_input_height)), ToTensor(), Normalize(mean=[0.485, 0.456,", "as it is also used during training if hasattr(pl_module, 'teacher'): patch_size = pl_module.teacher.patch_size", "= torch.argsort(idx) for head in range(nh): th_attn[:, head] = torch.gather(th_attn[:, head], dim=1, index=idx2[:,", "/= len(self.dataset) print(f\"Merged Jaccard on VOC12: {jacs_merged_attn.item()}\") print(f\"All heads Jaccard on VOC12: {jacs_all_heads.item()}\")", "th_attn = cumval > (1 - self.threshold) idx2 = torch.argsort(idx) for head in", "0 # If teacher is present use teacher attention as it is also", "import numpy as np import os import torch import torch.nn as nn import", "nh, -1) # we keep only a certain percentage of the mass val,", "label class EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root: str, train_input_height: int, attn_batch_size: int, num_workers: int,", "as we merged all heads # Gaussian blurring attentions = GaussianBlur(7, sigma=(.6))(attentions.reshape(bs *", "= torch.gather(th_attn[:, 0], dim=1, index=idx2[:, 0]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() #", "= 0.6): # Setup transforms and dataloader pvoc image_transforms = Compose([Resize((train_input_height, train_input_height)), ToTensor(),", "= VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader = DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True,", "nh = 1 # number of heads is one as we merged all", "= torch.argsort(idx) th_attn[:, 0] = torch.gather(th_attn[:, 0], dim=1, index=idx2[:, 0]) th_attn = th_attn.reshape(bs,", 
"o intersection = masko * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union =", "h_featmap, patch_size, maps) jacs_merged_attn /= len(self.dataset) jacs_all_heads /= len(self.dataset) print(f\"Merged Jaccard on VOC12:", "torch.sort(attentions) val /= torch.sum(val, dim=-1, keepdim=True) cumval = torch.cumsum(val, dim=-1) th_attn = cumval", "idx2 = torch.argsort(idx) for head in range(nh): th_attn[:, head] = torch.gather(th_attn[:, head], dim=1,", "on VOC2012 with threshold: \" + str(self.threshold) + \"#\" * 20) jacs_merged_attn =", "Evaluate attention maps. if pl_module.global_rank == 0 and pl_module.local_rank == 0: print(\"\\n\" +", "attentions.shape[1] # number of heads # we keep only a certain percentage of", "model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0] attentions = attentions[..., 0, 1:] # Evaluate two different", "pytorch_lightning as pl from data.VOCdevkit.vocdata import VOCDataset from torch.utils.data import DataLoader from torchvision.transforms", "attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size: int, maps: torch.Tensor) ->", "bs = attentions.shape[0] attentions = attentions[..., 0, 1:] # Evaluate two different protocols:", "jacs += jac return jacs def evaluate_merged_attentions(self, attentions: torch.Tensor, bs: int, w_featmap: int,", "range(attentions.size(1))) nh = 1 # number of heads is one as we merged", "= labelled == k if np.sum(mask) <= 2: th_attn[j, 0][mask] = 0 #", "th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (masko + th_attn[k]) > 0", "present use teacher attention as it is also used during training if hasattr(pl_module,", "then 3 pixels for j, th_att in enumerate(th_attn): labelled = label(th_att.cpu().numpy()) for k", "torchvision.transforms.functional import InterpolationMode from skimage.measure import label class EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root: str,", "from 
torchvision.transforms import Compose, Resize, ToTensor, Normalize, GaussianBlur from torchvision.transforms.functional import InterpolationMode from", "= Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader", "= np.delete(objects, [0, -1]) for o in objects: masko = map == o", "maps on VOC2012 with threshold: \" + str(self.threshold) + \"#\" * 20) jacs_merged_attn", "torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0] attentions = attentions[..., 0, 1:] #", "threshold: \" + str(self.threshold) + \"#\" * 20) jacs_merged_attn = 0 jacs_all_heads =", "int, h_featmap: int, patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs = 0 #", "(1 - self.threshold) idx2 = torch.argsort(idx) for head in range(nh): th_attn[:, head] =", "Resize, ToTensor, Normalize, GaussianBlur from torchvision.transforms.functional import InterpolationMode from skimage.measure import label class", "attention as it is also used during training if hasattr(pl_module, 'teacher'): patch_size =", "model.eval() for i, (imgs, maps) in enumerate(self.loader): w_featmap = imgs.shape[-2] // patch_size h_featmap", "are less then 3 pixels for j, th_att in enumerate(th_attn): labelled = label(th_att.cpu().numpy())", "= pl_module.model.patch_size model = pl_module.model model.eval() for i, (imgs, maps) in enumerate(self.loader): w_featmap", "evaluate_merged_attentions(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size: int, maps: torch.Tensor)", "remove components that are less then 3 pixels for j, th_att in enumerate(th_attn):", "drop_last=True, pin_memory=True) self.threshold = threshold def on_validation_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule): # Evaluate", 
"patch_size, maps) jacs_all_heads += self.evaluate_best_head(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_merged_attn /= len(self.dataset)", "= pl_module.teacher else: patch_size = pl_module.model.patch_size model = pl_module.model model.eval() for i, (imgs,", "attention maps. if pl_module.global_rank == 0 and pl_module.local_rank == 0: print(\"\\n\" + \"#\"", "on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self, attentions: torch.Tensor, bs: int,", "and best head jacs_merged_attn += self.evaluate_merged_attentions(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_all_heads +=", "jacs = 0 # Average attentions attentions = sum(attentions[:, i] * 1 /", "th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # remove components that are less then", "maps: torch.Tensor) -> torch.Tensor: jacs = 0 nh = attentions.shape[1] # number of", "intersection / union jac += max(jaco) if len(objects) != 0: jac /= len(objects)", "on_validation_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule): # Evaluate attention maps. 
if pl_module.global_rank == 0", "import os import torch import torch.nn as nn import pytorch_lightning as pl from", "patch_size with torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0] attentions = attentions[..., 0,", "0: print(\"\\n\" + \"#\" * 20 + \"Evaluating attention maps on VOC2012 with", "Compose, Resize, ToTensor, Normalize, GaussianBlur from torchvision.transforms.functional import InterpolationMode from skimage.measure import label", "on VOC12: {jacs_merged_attn.item()}\") print(f\"All heads Jaccard on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item())", "= Compose([Resize((train_input_height, train_input_height)), ToTensor(), Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])]) target_transforms = Compose([Resize((train_input_height,", "+= self.evaluate_best_head(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_merged_attn /= len(self.dataset) jacs_all_heads /= len(self.dataset)", "threshold: float = 0.6): # Setup transforms and dataloader pvoc image_transforms = Compose([Resize((train_input_height,", "attn_batch_size: int, num_workers: int, threshold: float = 0.6): # Setup transforms and dataloader", "Normalize, GaussianBlur from torchvision.transforms.functional import InterpolationMode from skimage.measure import label class EvaluateAttnMaps(pl.callbacks.Callback): def", "> (1 - self.threshold) idx2 = torch.argsort(idx) th_attn[:, 0] = torch.gather(th_attn[:, 0], dim=1,", "attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0] attentions = attentions[..., 0, 1:] # Evaluate", "target_transforms = Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), 
image_set=\"val\", transform=image_transforms, target_transform=target_transforms)", "= masko * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (masko +", "maps: torch.Tensor) -> torch.Tensor: jacs = 0 # Average attentions attentions = sum(attentions[:,", "= attentions.shape[0] attentions = attentions[..., 0, 1:] # Evaluate two different protocols: merged", "else: patch_size = pl_module.model.patch_size model = pl_module.model model.eval() for i, (imgs, maps) in", "transforms and dataloader pvoc image_transforms = Compose([Resize((train_input_height, train_input_height)), ToTensor(), Normalize(mean=[0.485, 0.456, 0.406], std=[0.229,", "+ \"#\" * 20) jacs_merged_attn = 0 jacs_all_heads = 0 # If teacher", "evaluate_best_head(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size: int, maps: torch.Tensor)", "self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader = DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers,", "for o in objects: masko = map == o intersection = masko *", "train_input_height: int, attn_batch_size: int, num_workers: int, threshold: float = 0.6): # Setup transforms", "* th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (masko + th_attn[k]) >", "each image for k, map in enumerate(maps): gt_fg_mask = (map != 0.).float() intersection", "= pl_module.teacher.patch_size model = pl_module.teacher else: patch_size = pl_module.model.patch_size model = pl_module.model model.eval()", "for i in range(attentions.size(1))) nh = 1 # number of heads is one", "3 pixels for j, th_att in enumerate(th_attn): labelled = label(th_att.cpu().numpy()) for k in", "GaussianBlur(7, sigma=(.6))(attentions.reshape(bs * nh, 1, w_featmap, h_featmap))\\ .reshape(bs, nh, -1) # we keep", "use teacher attention as it is also used 
during training if hasattr(pl_module, 'teacher'):", "# remove components that are less then 3 pixels for j, th_att in", "dim=-1) jaco = intersection / union jac += max(jaco) if len(objects) != 0:", "head] = torch.gather(th_attn[:, head], dim=1, index=idx2[:, head]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float()", "patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs = 0 # Average attentions attentions", "th_attn[:, head] = torch.gather(th_attn[:, head], dim=1, index=idx2[:, head]) th_attn = th_attn.reshape(bs, nh, w_featmap,", "jacs_merged_attn = 0 jacs_all_heads = 0 # If teacher is present use teacher", "def __init__(self, voc_root: str, train_input_height: int, attn_batch_size: int, num_workers: int, threshold: float =", "th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate", "pl_module: pl.LightningModule): # Evaluate attention maps. if pl_module.global_rank == 0 and pl_module.local_rank ==", "voc_root: str, train_input_height: int, attn_batch_size: int, num_workers: int, threshold: float = 0.6): #", "IoU for each image for k, map in enumerate(maps): jac = 0 objects", "Gaussian blurring attentions = GaussianBlur(7, sigma=(.6))(attentions.reshape(bs * nh, 1, w_featmap, h_featmap))\\ .reshape(bs, nh,", "+= max(jaco) if len(objects) != 0: jac /= len(objects) jacs += jac return", "> (1 - self.threshold) idx2 = torch.argsort(idx) for head in range(nh): th_attn[:, head]", "w_featmap, h_featmap))\\ .reshape(bs, nh, -1) # we keep only a certain percentage of", "torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (masko + th_attn[k]) > 0 union = torch.sum(torch.sum(union,", "0] = torch.gather(th_attn[:, 0], dim=1, index=idx2[:, 0]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float()", "2: th_attn[j, 0][mask] = 0 # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, 
mode=\"nearest\").cpu().numpy() #", "h_featmap: int, patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs = 0 # Average", "* 1 / attentions.size(1) for i in range(attentions.size(1))) nh = 1 # number", "/ attentions.size(1) for i in range(attentions.size(1))) nh = 1 # number of heads", "jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int,", "pl from data.VOCdevkit.vocdata import VOCDataset from torch.utils.data import DataLoader from torchvision.transforms import Compose,", "jac /= len(objects) jacs += jac return jacs def evaluate_merged_attentions(self, attentions: torch.Tensor, bs:", "/= torch.sum(val, dim=-1, keepdim=True) cumval = torch.cumsum(val, dim=-1) th_attn = cumval > (1", "dim=1, index=idx2[:, 0]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # remove components that", "= imgs.shape[-1] // patch_size with torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0] attentions", "/= len(objects) jacs += jac return jacs def evaluate_merged_attentions(self, attentions: torch.Tensor, bs: int,", "in enumerate(self.loader): w_featmap = imgs.shape[-2] // patch_size h_featmap = imgs.shape[-1] // patch_size with", "num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold = threshold def on_validation_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule): #", "and dataloader pvoc image_transforms = Compose([Resize((train_input_height, train_input_height)), ToTensor(), Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224,", "= model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0] attentions = attentions[..., 0, 1:] # Evaluate two", "import DataLoader from torchvision.transforms import Compose, Resize, ToTensor, Normalize, GaussianBlur from torchvision.transforms.functional import", "jacs 
def evaluate_merged_attentions(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size: int,", "0, 1:] # Evaluate two different protocols: merged attention and best head jacs_merged_attn", "import torch.nn as nn import pytorch_lightning as pl from data.VOCdevkit.vocdata import VOCDataset from", "(map != 0.).float() intersection = gt_fg_mask * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1)", "// patch_size with torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0] attentions = attentions[...,", "0.).float() intersection = gt_fg_mask * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union =", "ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader = DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False,", "cumval > (1 - self.threshold) idx2 = torch.argsort(idx) for head in range(nh): th_attn[:,", "= torch.sum(torch.sum(union, dim=-1), dim=-1) jaco = intersection / union jac += max(jaco) if", "np import os import torch import torch.nn as nn import pytorch_lightning as pl", "- self.threshold) idx2 = torch.argsort(idx) th_attn[:, 0] = torch.gather(th_attn[:, 0], dim=1, index=idx2[:, 0])", "th_attn[j, 0][mask] = 0 # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate", "teacher is present use teacher attention as it is also used during training", "/ union jac += max(jaco) if len(objects) != 0: jac /= len(objects) jacs", "for head in range(nh): th_attn[:, head] = torch.gather(th_attn[:, head], dim=1, index=idx2[:, head]) th_attn", "== o intersection = masko * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union", "best head jacs_merged_attn += self.evaluate_merged_attentions(attentions, 
bs, w_featmap, h_featmap, patch_size, maps) jacs_all_heads += self.evaluate_best_head(attentions,", "th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # remove components that are less then 3 pixels", "nh, w_featmap, h_featmap).float() # remove components that are less then 3 pixels for", "pl_module.global_rank == 0 and pl_module.local_rank == 0: print(\"\\n\" + \"#\" * 20 +", "= 0 # If teacher is present use teacher attention as it is", "\"VOCSegmentation\"), image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader = DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold", "\"Evaluating attention maps on VOC2012 with threshold: \" + str(self.threshold) + \"#\" *", "0]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # remove components that are less", "jacs_all_heads.item()) def evaluate_best_head(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size: int,", "= th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # remove components that are less then 3", "= sum(attentions[:, i] * 1 / attentions.size(1) for i in range(attentions.size(1))) nh =", "0 and pl_module.local_rank == 0: print(\"\\n\" + \"#\" * 20 + \"Evaluating attention", "th_attn = cumval > (1 - self.threshold) idx2 = torch.argsort(idx) th_attn[:, 0] =", "= map == o intersection = masko * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1),", "class EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root: str, train_input_height: int, attn_batch_size: int, num_workers: int, threshold:", "union jac += max(jaco) if len(objects) != 0: jac /= len(objects) jacs +=", "k, map in enumerate(maps): jac = 0 objects = np.unique(map) objects = np.delete(objects,", "threshold def on_validation_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule): # Evaluate attention maps. 
if pl_module.global_rank", "dim=-1), dim=-1) union = (masko + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1),", "patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs = 0 nh = attentions.shape[1] #", "VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader = DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True)", "th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jacs += intersection / union", "0.225])]) target_transforms = Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\", transform=image_transforms,", "= th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() #", "return jacs def evaluate_merged_attentions(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size:", "components that are less then 3 pixels for j, th_att in enumerate(th_attn): labelled", "enumerate(maps): gt_fg_mask = (map != 0.).float() intersection = gt_fg_mask * th_attn[k] intersection =", "np.delete(objects, [0, -1]) for o in objects: masko = map == o intersection", "= torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (masko + th_attn[k]) > 0 union =", "each image for k, map in enumerate(maps): jac = 0 objects = np.unique(map)", "# Gaussian blurring attentions = GaussianBlur(7, sigma=(.6))(attentions.reshape(bs * nh, 1, w_featmap, h_featmap))\\ .reshape(bs,", "interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU for each image for", "> 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jacs += intersection / 
union return", "== k if np.sum(mask) <= 2: th_attn[j, 0][mask] = 0 # interpolate th_attn", "jacs_merged_attn /= len(self.dataset) jacs_all_heads /= len(self.dataset) print(f\"Merged Jaccard on VOC12: {jacs_merged_attn.item()}\") print(f\"All heads", "intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (masko + th_attn[k]) > 0 union", "If teacher is present use teacher attention as it is also used during", "# number of heads is one as we merged all heads # Gaussian", "all heads # Gaussian blurring attentions = GaussianBlur(7, sigma=(.6))(attentions.reshape(bs * nh, 1, w_featmap,", "attentions attentions = sum(attentions[:, i] * 1 / attentions.size(1) for i in range(attentions.size(1)))", "idx2 = torch.argsort(idx) th_attn[:, 0] = torch.gather(th_attn[:, 0], dim=1, index=idx2[:, 0]) th_attn =", "jacs_all_heads = 0 # If teacher is present use teacher attention as it", "certain percentage of the mass val, idx = torch.sort(attentions) val /= torch.sum(val, dim=-1,", "of heads is one as we merged all heads # Gaussian blurring attentions", "len(objects) != 0: jac /= len(objects) jacs += jac return jacs def evaluate_merged_attentions(self,", "20) jacs_merged_attn = 0 jacs_all_heads = 0 # If teacher is present use", "map in enumerate(maps): gt_fg_mask = (map != 0.).float() intersection = gt_fg_mask * th_attn[k]", "w_featmap = imgs.shape[-2] // patch_size h_featmap = imgs.shape[-1] // patch_size with torch.no_grad(): attentions", "import pytorch_lightning as pl from data.VOCdevkit.vocdata import VOCDataset from torch.utils.data import DataLoader from", "a certain percentage of the mass val, idx = torch.sort(attentions) val /= torch.sum(val,", "float = 0.6): # Setup transforms and dataloader pvoc image_transforms = Compose([Resize((train_input_height, train_input_height)),", "self.threshold) idx2 = torch.argsort(idx) for head in range(nh): th_attn[:, head] = torch.gather(th_attn[:, head],", "is one as we merged all heads # Gaussian blurring attentions = 
GaussianBlur(7,", "union = (masko + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jaco", "intersection = gt_fg_mask * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (gt_fg_mask", "# Calculate IoU for each image for k, map in enumerate(maps): gt_fg_mask =", "0 nh = attentions.shape[1] # number of heads # we keep only a", "cumval = torch.cumsum(val, dim=-1) th_attn = cumval > (1 - self.threshold) idx2 =", "print(f\"Merged Jaccard on VOC12: {jacs_merged_attn.item()}\") print(f\"All heads Jaccard on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item())", "> 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jaco = intersection / union jac", "dim=-1) th_attn = cumval > (1 - self.threshold) idx2 = torch.argsort(idx) th_attn[:, 0]", "torch.nn as nn import pytorch_lightning as pl from data.VOCdevkit.vocdata import VOCDataset from torch.utils.data", "of the mass val, idx = torch.sort(attentions) val /= torch.sum(val, dim=-1, keepdim=True) cumval", "1:] # Evaluate two different protocols: merged attention and best head jacs_merged_attn +=", "def on_validation_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule): # Evaluate attention maps. 
if pl_module.global_rank ==", "= 0 # Average attentions attentions = sum(attentions[:, i] * 1 / attentions.size(1)", "1 # number of heads is one as we merged all heads #", "range(1, np.max(labelled) + 1): mask = labelled == k if np.sum(mask) <= 2:", "in enumerate(maps): jac = 0 objects = np.unique(map) objects = np.delete(objects, [0, -1])", "union = torch.sum(torch.sum(union, dim=-1), dim=-1) jaco = intersection / union jac += max(jaco)", "number of heads is one as we merged all heads # Gaussian blurring", "pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size:", "image for k, map in enumerate(maps): gt_fg_mask = (map != 0.).float() intersection =", "int, threshold: float = 0.6): # Setup transforms and dataloader pvoc image_transforms =", "if len(objects) != 0: jac /= len(objects) jacs += jac return jacs def", "for j, th_att in enumerate(th_attn): labelled = label(th_att.cpu().numpy()) for k in range(1, np.max(labelled)", "dim=1, index=idx2[:, head]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # interpolate th_attn =", "1 / attentions.size(1) for i in range(attentions.size(1))) nh = 1 # number of", "train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader = DataLoader(self.dataset,", "intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (gt_fg_mask + th_attn[k]) > 0 union", "from torchvision.transforms.functional import InterpolationMode from skimage.measure import label class EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root:", "pl_module.local_rank == 0: print(\"\\n\" + \"#\" * 20 + \"Evaluating attention maps on", "np.max(labelled) + 1): mask = labelled == k if np.sum(mask) <= 2: 
th_attn[j,", "cumval > (1 - self.threshold) idx2 = torch.argsort(idx) th_attn[:, 0] = torch.gather(th_attn[:, 0],", "torch.utils.data import DataLoader from torchvision.transforms import Compose, Resize, ToTensor, Normalize, GaussianBlur from torchvision.transforms.functional", "transform=image_transforms, target_transform=target_transforms) self.loader = DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold = threshold", "\"#\" * 20 + \"Evaluating attention maps on VOC2012 with threshold: \" +", "val, idx = torch.sort(attentions) val /= torch.sum(val, dim=-1, keepdim=True) cumval = torch.cumsum(val, dim=-1)", "if hasattr(pl_module, 'teacher'): patch_size = pl_module.teacher.patch_size model = pl_module.teacher else: patch_size = pl_module.model.patch_size", "+ 1): mask = labelled == k if np.sum(mask) <= 2: th_attn[j, 0][mask]", "import VOCDataset from torch.utils.data import DataLoader from torchvision.transforms import Compose, Resize, ToTensor, Normalize,", "# Calculate IoU for each image for k, map in enumerate(maps): jac =", "from torch.utils.data import DataLoader from torchvision.transforms import Compose, Resize, ToTensor, Normalize, GaussianBlur from", "from data.VOCdevkit.vocdata import VOCDataset from torch.utils.data import DataLoader from torchvision.transforms import Compose, Resize,", "+ th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jacs += intersection /", "dim=-1, keepdim=True) cumval = torch.cumsum(val, dim=-1) th_attn = cumval > (1 - self.threshold)", "(1 - self.threshold) idx2 = torch.argsort(idx) th_attn[:, 0] = torch.gather(th_attn[:, 0], dim=1, index=idx2[:,", "<= 2: th_attn[j, 0][mask] = 0 # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy()", "hasattr(pl_module, 'teacher'): patch_size = pl_module.teacher.patch_size model = pl_module.teacher else: patch_size = 
pl_module.model.patch_size model", "for k, map in enumerate(maps): jac = 0 objects = np.unique(map) objects =", "data.VOCdevkit.vocdata import VOCDataset from torch.utils.data import DataLoader from torchvision.transforms import Compose, Resize, ToTensor,", "o in objects: masko = map == o intersection = masko * th_attn[k]", "the mass val, idx = torch.sort(attentions) val /= torch.sum(val, dim=-1, keepdim=True) cumval =", "= torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (gt_fg_mask + th_attn[k]) > 0 union =", "InterpolationMode from skimage.measure import label class EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root: str, train_input_height: int,", "VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self, attentions: torch.Tensor, bs: int, w_featmap:", "torch.Tensor) -> torch.Tensor: jacs = 0 # Average attentions attentions = sum(attentions[:, i]", "that are less then 3 pixels for j, th_att in enumerate(th_attn): labelled =", "maps) jacs_merged_attn /= len(self.dataset) jacs_all_heads /= len(self.dataset) print(f\"Merged Jaccard on VOC12: {jacs_merged_attn.item()}\") print(f\"All", "max(jaco) if len(objects) != 0: jac /= len(objects) jacs += jac return jacs", "range(nh): th_attn[:, head] = torch.gather(th_attn[:, head], dim=1, index=idx2[:, head]) th_attn = th_attn.reshape(bs, nh,", "h_featmap).float() # remove components that are less then 3 pixels for j, th_att", "i in range(attentions.size(1))) nh = 1 # number of heads is one as", "index=idx2[:, 0]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # remove components that are", "- self.threshold) idx2 = torch.argsort(idx) for head in range(nh): th_attn[:, head] = torch.gather(th_attn[:,", "torchvision.transforms import Compose, Resize, ToTensor, Normalize, GaussianBlur from 
torchvision.transforms.functional import InterpolationMode from skimage.measure", "# Evaluate attention maps. if pl_module.global_rank == 0 and pl_module.local_rank == 0: print(\"\\n\"", "0 objects = np.unique(map) objects = np.delete(objects, [0, -1]) for o in objects:", "print(f\"All heads Jaccard on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self, attentions:", "+ str(self.threshold) + \"#\" * 20) jacs_merged_attn = 0 jacs_all_heads = 0 #", "attentions.shape[0] attentions = attentions[..., 0, 1:] # Evaluate two different protocols: merged attention", "Compose([Resize((train_input_height, train_input_height)), ToTensor(), Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])]) target_transforms = Compose([Resize((train_input_height, train_input_height),", "torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (gt_fg_mask + th_attn[k]) > 0 union = torch.sum(torch.sum(union,", "attentions = attentions[..., 0, 1:] # Evaluate two different protocols: merged attention and", "-1) # we keep only a certain percentage of the mass val, idx", "th_attn[:, 0] = torch.gather(th_attn[:, 0], dim=1, index=idx2[:, 0]) th_attn = th_attn.reshape(bs, nh, w_featmap,", "= pl_module.model model.eval() for i, (imgs, maps) in enumerate(self.loader): w_featmap = imgs.shape[-2] //", "{jacs_merged_attn.item()}\") print(f\"All heads Jaccard on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self,", "dim=-1), dim=-1) union = (gt_fg_mask + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1),", "= label(th_att.cpu().numpy()) for k in range(1, np.max(labelled) + 1): mask = labelled ==", "attentions = GaussianBlur(7, 
sigma=(.6))(attentions.reshape(bs * nh, 1, w_featmap, h_featmap))\\ .reshape(bs, nh, -1) #", "len(self.dataset) jacs_all_heads /= len(self.dataset) print(f\"Merged Jaccard on VOC12: {jacs_merged_attn.item()}\") print(f\"All heads Jaccard on", "nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU for each image for k, map in", "dim=-1) union = (masko + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1)", "union = (gt_fg_mask + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jacs", "gt_fg_mask * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (gt_fg_mask + th_attn[k])", "int, num_workers: int, threshold: float = 0.6): # Setup transforms and dataloader pvoc", "th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (gt_fg_mask + th_attn[k]) > 0", "objects = np.delete(objects, [0, -1]) for o in objects: masko = map ==", "as np import os import torch import torch.nn as nn import pytorch_lightning as", "= torch.cumsum(val, dim=-1) th_attn = cumval > (1 - self.threshold) idx2 = torch.argsort(idx)", "in objects: masko = map == o intersection = masko * th_attn[k] intersection", "0 # Average attentions attentions = sum(attentions[:, i] * 1 / attentions.size(1) for", "np.sum(mask) <= 2: th_attn[j, 0][mask] = 0 # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size,", "= 0 # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU for", "Calculate IoU for each image for k, map in enumerate(maps): gt_fg_mask = (map", "w_featmap, h_featmap).float() # remove components that are less then 3 pixels for j,", "!= 0.).float() intersection = gt_fg_mask * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union", "# Evaluate two different protocols: merged attention and best head jacs_merged_attn += 
self.evaluate_merged_attentions(attentions,", "-> torch.Tensor: jacs = 0 nh = attentions.shape[1] # number of heads #", "nh, w_featmap, h_featmap).float() # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU", "for k, map in enumerate(maps): gt_fg_mask = (map != 0.).float() intersection = gt_fg_mask", "head jacs_merged_attn += self.evaluate_merged_attentions(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_all_heads += self.evaluate_best_head(attentions, bs,", "self.threshold) idx2 = torch.argsort(idx) th_attn[:, 0] = torch.gather(th_attn[:, 0], dim=1, index=idx2[:, 0]) th_attn", "attentions[..., 0, 1:] # Evaluate two different protocols: merged attention and best head", "merged all heads # Gaussian blurring attentions = GaussianBlur(7, sigma=(.6))(attentions.reshape(bs * nh, 1,", "w_featmap, h_featmap).float() # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU for", "used during training if hasattr(pl_module, 'teacher'): patch_size = pl_module.teacher.patch_size model = pl_module.teacher else:", "dim=-1), dim=-1) jaco = intersection / union jac += max(jaco) if len(objects) !=", "EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root: str, train_input_height: int, attn_batch_size: int, num_workers: int, threshold: float", "in enumerate(th_attn): labelled = label(th_att.cpu().numpy()) for k in range(1, np.max(labelled) + 1): mask", "DataLoader from torchvision.transforms import Compose, Resize, ToTensor, Normalize, GaussianBlur from torchvision.transforms.functional import InterpolationMode", "0.6): # Setup transforms and dataloader pvoc image_transforms = Compose([Resize((train_input_height, train_input_height)), ToTensor(), Normalize(mean=[0.485,", "{jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) 
pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self, attentions: torch.Tensor, bs: int, w_featmap: int,", "jacs = 0 nh = attentions.shape[1] # number of heads # we keep", "= torch.sort(attentions) val /= torch.sum(val, dim=-1, keepdim=True) cumval = torch.cumsum(val, dim=-1) th_attn =", "model = pl_module.teacher else: patch_size = pl_module.model.patch_size model = pl_module.model model.eval() for i,", "int, w_featmap: int, h_featmap: int, patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs =", "= cumval > (1 - self.threshold) idx2 = torch.argsort(idx) th_attn[:, 0] = torch.gather(th_attn[:,", "ToTensor, Normalize, GaussianBlur from torchvision.transforms.functional import InterpolationMode from skimage.measure import label class EvaluateAttnMaps(pl.callbacks.Callback):", "jaco = intersection / union jac += max(jaco) if len(objects) != 0: jac", "int, patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs = 0 # Average attentions", "pixels for j, th_att in enumerate(th_attn): labelled = label(th_att.cpu().numpy()) for k in range(1,", "torch.Tensor: jacs = 0 # Average attentions attentions = sum(attentions[:, i] * 1", "Calculate IoU for each image for k, map in enumerate(maps): jac = 0", "two different protocols: merged attention and best head jacs_merged_attn += self.evaluate_merged_attentions(attentions, bs, w_featmap,", "shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold = threshold def on_validation_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule):", "model = pl_module.model model.eval() for i, (imgs, maps) in enumerate(self.loader): w_featmap = imgs.shape[-2]", "image for k, map in enumerate(maps): jac = 0 objects = np.unique(map) objects", "(gt_fg_mask + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jacs += intersection", "maps. 
if pl_module.global_rank == 0 and pl_module.local_rank == 0: print(\"\\n\" + \"#\" *", "from skimage.measure import label class EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root: str, train_input_height: int, attn_batch_size:", "(masko + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jaco = intersection", "merged attention and best head jacs_merged_attn += self.evaluate_merged_attentions(attentions, bs, w_featmap, h_featmap, patch_size, maps)", "masko * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (masko + th_attn[k])", "different protocols: merged attention and best head jacs_merged_attn += self.evaluate_merged_attentions(attentions, bs, w_featmap, h_featmap,", "masko = map == o intersection = masko * th_attn[k] intersection = torch.sum(torch.sum(intersection,", "scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU for each image for k, map in enumerate(maps):", "self.evaluate_best_head(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_merged_attn /= len(self.dataset) jacs_all_heads /= len(self.dataset) print(f\"Merged", "= 0 jacs_all_heads = 0 # If teacher is present use teacher attention", "import label class EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root: str, train_input_height: int, attn_batch_size: int, num_workers:", "and pl_module.local_rank == 0: print(\"\\n\" + \"#\" * 20 + \"Evaluating attention maps", "w_featmap, h_featmap, patch_size, maps) jacs_merged_attn /= len(self.dataset) jacs_all_heads /= len(self.dataset) print(f\"Merged Jaccard on", "+ \"#\" * 20 + \"Evaluating attention maps on VOC2012 with threshold: \"", "i] * 1 / attentions.size(1) for i in range(attentions.size(1))) nh = 1 #", "= 0 nh = attentions.shape[1] # number of heads # we keep only", "= threshold def on_validation_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule): # Evaluate attention maps. 
if", "h_featmap).float() # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU for each", "map in enumerate(maps): jac = 0 objects = np.unique(map) objects = np.delete(objects, [0,", "th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jaco = intersection / union", "pl.LightningModule): # Evaluate attention maps. if pl_module.global_rank == 0 and pl_module.local_rank == 0:", "= cumval > (1 - self.threshold) idx2 = torch.argsort(idx) for head in range(nh):", "keepdim=True) cumval = torch.cumsum(val, dim=-1) th_attn = cumval > (1 - self.threshold) idx2", "self.threshold = threshold def on_validation_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule): # Evaluate attention maps.", "target_transform=target_transforms) self.loader = DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold = threshold def", "int, maps: torch.Tensor) -> torch.Tensor: jacs = 0 nh = attentions.shape[1] # number", "Average attentions attentions = sum(attentions[:, i] * 1 / attentions.size(1) for i in", "mask = labelled == k if np.sum(mask) <= 2: th_attn[j, 0][mask] = 0", "th_att in enumerate(th_attn): labelled = label(th_att.cpu().numpy()) for k in range(1, np.max(labelled) + 1):", "== 0: print(\"\\n\" + \"#\" * 20 + \"Evaluating attention maps on VOC2012", "jac = 0 objects = np.unique(map) objects = np.delete(objects, [0, -1]) for o", "-1]) for o in objects: masko = map == o intersection = masko", "/= len(self.dataset) jacs_all_heads /= len(self.dataset) print(f\"Merged Jaccard on VOC12: {jacs_merged_attn.item()}\") print(f\"All heads Jaccard", "h_featmap, patch_size, maps) jacs_all_heads += self.evaluate_best_head(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_merged_attn /=", "h_featmap))\\ .reshape(bs, nh, -1) # we keep only a certain percentage of the", "1): mask = labelled == k if 
np.sum(mask) <= 2: th_attn[j, 0][mask] =", "imgs.shape[-2] // patch_size h_featmap = imgs.shape[-1] // patch_size with torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device))", "pl_module.teacher else: patch_size = pl_module.model.patch_size model = pl_module.model model.eval() for i, (imgs, maps)", "Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader =", "trainer: pl.Trainer, pl_module: pl.LightningModule): # Evaluate attention maps. if pl_module.global_rank == 0 and", "with torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0] attentions = attentions[..., 0, 1:]", "0][mask] = 0 # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU", "imgs.shape[-1] // patch_size with torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0] attentions =", "for each image for k, map in enumerate(maps): gt_fg_mask = (map != 0.).float()", "import Compose, Resize, ToTensor, Normalize, GaussianBlur from torchvision.transforms.functional import InterpolationMode from skimage.measure import", "nh, 1, w_featmap, h_featmap))\\ .reshape(bs, nh, -1) # we keep only a certain", "k if np.sum(mask) <= 2: th_attn[j, 0][mask] = 0 # interpolate th_attn =", "dim=-1) union = (gt_fg_mask + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1)", "str(self.threshold) + \"#\" * 20) jacs_merged_attn = 0 jacs_all_heads = 0 # If", "we keep only a certain percentage of the mass val, idx = torch.sort(attentions)", "+= self.evaluate_merged_attentions(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_all_heads += 
self.evaluate_best_head(attentions, bs, w_featmap, h_featmap,", "GaussianBlur from torchvision.transforms.functional import InterpolationMode from skimage.measure import label class EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self,", "!= 0: jac /= len(objects) jacs += jac return jacs def evaluate_merged_attentions(self, attentions:", "nh = attentions.shape[1] # number of heads # we keep only a certain", "as nn import pytorch_lightning as pl from data.VOCdevkit.vocdata import VOCDataset from torch.utils.data import", "jac += max(jaco) if len(objects) != 0: jac /= len(objects) jacs += jac", "mode=\"nearest\").cpu().numpy() # Calculate IoU for each image for k, map in enumerate(maps): jac", "# If teacher is present use teacher attention as it is also used", "label(th_att.cpu().numpy()) for k in range(1, np.max(labelled) + 1): mask = labelled == k", "np.unique(map) objects = np.delete(objects, [0, -1]) for o in objects: masko = map", "= DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold = threshold def on_validation_start(self, trainer:", "intersection = masko * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (masko", "\" + str(self.threshold) + \"#\" * 20) jacs_merged_attn = 0 jacs_all_heads = 0", "len(self.dataset) print(f\"Merged Jaccard on VOC12: {jacs_merged_attn.item()}\") print(f\"All heads Jaccard on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc',", "self.evaluate_merged_attentions(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_all_heads += self.evaluate_best_head(attentions, bs, w_featmap, h_featmap, patch_size,", "+= jac return jacs def evaluate_merged_attentions(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap:", "* 20 + \"Evaluating attention maps on VOC2012 with threshold: \" + str(self.threshold)", "is also used during training if 
hasattr(pl_module, 'teacher'): patch_size = pl_module.teacher.patch_size model =", "dim=-1) th_attn = cumval > (1 - self.threshold) idx2 = torch.argsort(idx) for head", "enumerate(th_attn): labelled = label(th_att.cpu().numpy()) for k in range(1, np.max(labelled) + 1): mask =", "skimage.measure import label class EvaluateAttnMaps(pl.callbacks.Callback): def __init__(self, voc_root: str, train_input_height: int, attn_batch_size: int,", "during training if hasattr(pl_module, 'teacher'): patch_size = pl_module.teacher.patch_size model = pl_module.teacher else: patch_size", "jacs_all_heads /= len(self.dataset) print(f\"Merged Jaccard on VOC12: {jacs_merged_attn.item()}\") print(f\"All heads Jaccard on VOC12:", "objects = np.unique(map) objects = np.delete(objects, [0, -1]) for o in objects: masko", "numpy as np import os import torch import torch.nn as nn import pytorch_lightning", "torch.argsort(idx) th_attn[:, 0] = torch.gather(th_attn[:, 0], dim=1, index=idx2[:, 0]) th_attn = th_attn.reshape(bs, nh,", "def evaluate_best_head(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size: int, maps:", "nn import pytorch_lightning as pl from data.VOCdevkit.vocdata import VOCDataset from torch.utils.data import DataLoader", "self.loader = DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold = threshold def on_validation_start(self,", "Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])]) target_transforms = Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset", "less then 3 pixels for j, th_att in enumerate(th_attn): labelled = label(th_att.cpu().numpy()) for", "jac return jacs def evaluate_merged_attentions(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int,", "as pl from data.VOCdevkit.vocdata import VOCDataset from torch.utils.data import DataLoader from 
torchvision.transforms import", "attention and best head jacs_merged_attn += self.evaluate_merged_attentions(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_all_heads", "bs: int, w_featmap: int, h_featmap: int, patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs", "* nh, 1, w_featmap, h_featmap))\\ .reshape(bs, nh, -1) # we keep only a", "int, h_featmap: int, patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs = 0 nh", "torch.cumsum(val, dim=-1) th_attn = cumval > (1 - self.threshold) idx2 = torch.argsort(idx) for", "teacher attention as it is also used during training if hasattr(pl_module, 'teacher'): patch_size", "int, attn_batch_size: int, num_workers: int, threshold: float = 0.6): # Setup transforms and", "interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset = VOCDataset(root=os.path.join(voc_root, \"VOCSegmentation\"), image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader = DataLoader(self.dataset, batch_size=attn_batch_size,", "keep only a certain percentage of the mass val, idx = torch.sort(attentions) val", "def evaluate_merged_attentions(self, attentions: torch.Tensor, bs: int, w_featmap: int, h_featmap: int, patch_size: int, maps:", "labelled = label(th_att.cpu().numpy()) for k in range(1, np.max(labelled) + 1): mask = labelled", "import torch import torch.nn as nn import pytorch_lightning as pl from data.VOCdevkit.vocdata import", "VOC2012 with threshold: \" + str(self.threshold) + \"#\" * 20) jacs_merged_attn = 0", "h_featmap = imgs.shape[-1] // patch_size with torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs = attentions.shape[0]", "(imgs, maps) in enumerate(self.loader): w_featmap = imgs.shape[-2] // patch_size h_featmap = imgs.shape[-1] //", "sum(attentions[:, i] * 1 / attentions.size(1) for i in range(attentions.size(1))) nh = 1", "0 # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, 
mode=\"nearest\").cpu().numpy() # Calculate IoU for each", "= intersection / union jac += max(jaco) if len(objects) != 0: jac /=", "enumerate(maps): jac = 0 objects = np.unique(map) objects = np.delete(objects, [0, -1]) for", "[0, -1]) for o in objects: masko = map == o intersection =", "attentions.size(1) for i in range(attentions.size(1))) nh = 1 # number of heads is", "Setup transforms and dataloader pvoc image_transforms = Compose([Resize((train_input_height, train_input_height)), ToTensor(), Normalize(mean=[0.485, 0.456, 0.406],", "in range(1, np.max(labelled) + 1): mask = labelled == k if np.sum(mask) <=", "pin_memory=True) self.threshold = threshold def on_validation_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule): # Evaluate attention", "torch.sum(val, dim=-1, keepdim=True) cumval = torch.cumsum(val, dim=-1) th_attn = cumval > (1 -", "train_input_height)), ToTensor(), Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])]) target_transforms = Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST),", "heads Jaccard on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self, attentions: torch.Tensor,", "attentions = sum(attentions[:, i] * 1 / attentions.size(1) for i in range(attentions.size(1))) nh", "# we keep only a certain percentage of the mass val, idx =", "only a certain percentage of the mass val, idx = torch.sort(attentions) val /=", "h_featmap: int, patch_size: int, maps: torch.Tensor) -> torch.Tensor: jacs = 0 nh =", "= GaussianBlur(7, sigma=(.6))(attentions.reshape(bs * nh, 1, w_featmap, h_featmap))\\ .reshape(bs, nh, -1) # we", "os import torch import torch.nn as nn import pytorch_lightning as pl from data.VOCdevkit.vocdata", "patch_size, maps) jacs_merged_attn /= len(self.dataset) jacs_all_heads /= 
len(self.dataset) print(f\"Merged Jaccard on VOC12: {jacs_merged_attn.item()}\")", "torch.gather(th_attn[:, 0], dim=1, index=idx2[:, 0]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # remove", "labelled == k if np.sum(mask) <= 2: th_attn[j, 0][mask] = 0 # interpolate", "one as we merged all heads # Gaussian blurring attentions = GaussianBlur(7, sigma=(.6))(attentions.reshape(bs", "i, (imgs, maps) in enumerate(self.loader): w_featmap = imgs.shape[-2] // patch_size h_featmap = imgs.shape[-1]", "= (gt_fg_mask + th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jacs +=", "* 20) jacs_merged_attn = 0 jacs_all_heads = 0 # If teacher is present", "torch.gather(th_attn[:, head], dim=1, index=idx2[:, head]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # interpolate", "torch.cumsum(val, dim=-1) th_attn = cumval > (1 - self.threshold) idx2 = torch.argsort(idx) th_attn[:,", "pl_module.teacher.patch_size model = pl_module.teacher else: patch_size = pl_module.model.patch_size model = pl_module.model model.eval() for", "= 1 # number of heads is one as we merged all heads", "torch.sum(torch.sum(union, dim=-1), dim=-1) jaco = intersection / union jac += max(jaco) if len(objects)", "* th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (gt_fg_mask + th_attn[k]) >", "0.456, 0.406], std=[0.229, 0.224, 0.225])]) target_transforms = Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()]) self.dataset =", "in range(nh): th_attn[:, head] = torch.gather(th_attn[:, head], dim=1, index=idx2[:, head]) th_attn = th_attn.reshape(bs,", "= nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU for each image for k, map", "len(objects) jacs += jac return jacs def evaluate_merged_attentions(self, attentions: torch.Tensor, bs: int, w_featmap:", "int, patch_size: int, maps: torch.Tensor) -> torch.Tensor: 
jacs = 0 nh = attentions.shape[1]", "image_set=\"val\", transform=image_transforms, target_transform=target_transforms) self.loader = DataLoader(self.dataset, batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold =", "Evaluate two different protocols: merged attention and best head jacs_merged_attn += self.evaluate_merged_attentions(attentions, bs,", "maps) in enumerate(self.loader): w_featmap = imgs.shape[-2] // patch_size h_featmap = imgs.shape[-1] // patch_size", "heads # we keep only a certain percentage of the mass val, idx", "ToTensor(), Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])]) target_transforms = Compose([Resize((train_input_height, train_input_height), interpolation=InterpolationMode.NEAREST), ToTensor()])", "th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy()", "in enumerate(maps): gt_fg_mask = (map != 0.).float() intersection = gt_fg_mask * th_attn[k] intersection", "pl.Trainer, pl_module: pl.LightningModule): # Evaluate attention maps. 
if pl_module.global_rank == 0 and pl_module.local_rank", "number of heads # we keep only a certain percentage of the mass", "mass val, idx = torch.sort(attentions) val /= torch.sum(val, dim=-1, keepdim=True) cumval = torch.cumsum(val,", "for k in range(1, np.max(labelled) + 1): mask = labelled == k if", "Jaccard on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def evaluate_best_head(self, attentions: torch.Tensor, bs:", "# Average attentions attentions = sum(attentions[:, i] * 1 / attentions.size(1) for i", "VOC12: {jacs_merged_attn.item()}\") print(f\"All heads Jaccard on VOC12: {jacs_all_heads.item()}\") pl_module.logger.experiment.log_metric('attn_jacs_voc', jacs_merged_attn.item()) pl_module.logger.experiment.log_metric('all_heads_jacs_voc', jacs_all_heads.item()) def", "= 0 objects = np.unique(map) objects = np.delete(objects, [0, -1]) for o in", "sigma=(.6))(attentions.reshape(bs * nh, 1, w_featmap, h_featmap))\\ .reshape(bs, nh, -1) # we keep only", "k in range(1, np.max(labelled) + 1): mask = labelled == k if np.sum(mask)", "0], dim=1, index=idx2[:, 0]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # remove components", "= attentions[..., 0, 1:] # Evaluate two different protocols: merged attention and best", "batch_size=attn_batch_size, shuffle=False, num_workers=num_workers, drop_last=True, pin_memory=True) self.threshold = threshold def on_validation_start(self, trainer: pl.Trainer, pl_module:", "heads # Gaussian blurring attentions = GaussianBlur(7, sigma=(.6))(attentions.reshape(bs * nh, 1, w_featmap, h_featmap))\\", "jacs_merged_attn += self.evaluate_merged_attentions(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_all_heads += self.evaluate_best_head(attentions, bs, w_featmap,", "= imgs.shape[-2] // patch_size h_featmap = imgs.shape[-1] // patch_size with 
torch.no_grad(): attentions =", "torch.Tensor) -> torch.Tensor: jacs = 0 nh = attentions.shape[1] # number of heads", "w_featmap, h_featmap, patch_size, maps) jacs_all_heads += self.evaluate_best_head(attentions, bs, w_featmap, h_featmap, patch_size, maps) jacs_merged_attn", "patch_size h_featmap = imgs.shape[-1] // patch_size with torch.no_grad(): attentions = model.get_last_selfattention(imgs.to(pl_module.device)) bs =", "= gt_fg_mask * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1) union = (gt_fg_mask +", "pl_module.model.patch_size model = pl_module.model model.eval() for i, (imgs, maps) in enumerate(self.loader): w_featmap =", "head], dim=1, index=idx2[:, head]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # interpolate th_attn", "if pl_module.global_rank == 0 and pl_module.local_rank == 0: print(\"\\n\" + \"#\" * 20", "# interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size, mode=\"nearest\").cpu().numpy() # Calculate IoU for each image", "of heads # we keep only a certain percentage of the mass val,", "head]) th_attn = th_attn.reshape(bs, nh, w_featmap, h_featmap).float() # interpolate th_attn = nn.functional.interpolate(th_attn, scale_factor=patch_size,", "torch.argsort(idx) for head in range(nh): th_attn[:, head] = torch.gather(th_attn[:, head], dim=1, index=idx2[:, head])", "it is also used during training if hasattr(pl_module, 'teacher'): patch_size = pl_module.teacher.patch_size model", "attention maps on VOC2012 with threshold: \" + str(self.threshold) + \"#\" * 20)", "== 0 and pl_module.local_rank == 0: print(\"\\n\" + \"#\" * 20 + \"Evaluating", "map == o intersection = masko * th_attn[k] intersection = torch.sum(torch.sum(intersection, dim=-1), dim=-1)", "0: jac /= len(objects) jacs += jac return jacs def evaluate_merged_attentions(self, attentions: torch.Tensor,", "pvoc image_transforms = Compose([Resize((train_input_height, train_input_height)), ToTensor(), 
Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])]) target_transforms", "IoU for each image for k, map in enumerate(maps): gt_fg_mask = (map !=", "+ th_attn[k]) > 0 union = torch.sum(torch.sum(union, dim=-1), dim=-1) jaco = intersection /", "0 jacs_all_heads = 0 # If teacher is present use teacher attention as" ]
[ "send_mes(): while True: print('') outdata = input('') print() for client in clients: client.send(f\"Сервер:", "= '\\n') def recv_data(client): while True: try: indata = client.recv(1024) except Exception: clients.remove(client)", "end.append(i) t1 = threading.Thread(target=send_mes, name='input') t1.start() t2 = threading.Thread(target=get_mes, name='out') t2.start() t3 =", "server.listen(5) clients = list() end = list() def get(): while True: client, addr", "except Exception: clients.remove(client) end.remove(client) print( f'Сервер отключен: количество клиентов: {len (clients)}', end =", "for i in clients: if i in end: continue index = threading.Thread(target=recv_data, args=(i,))", "while True: for i in clients: if i in end: continue index =", "in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes(): while True: for i in clients: if", "t1.start() t2 = threading.Thread(target=get_mes, name='out') t2.start() t3 = threading.Thread(target=get(), name='get') t3.start() t2.join() for", "for client in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes(): while True: for i in", "server.accept() clients.append(client) print(f'сервер подключен через {addr}: количество клиентов: {len (clients)}', end = '\\n')", "recv_data(client): while True: try: indata = client.recv(1024) except Exception: clients.remove(client) end.remove(client) print( f'Сервер", "клиентов: {len (clients)}', end = '\\n') def recv_data(client): while True: try: indata =", "i in end: continue index = threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i) t1 = threading.Thread(target=send_mes,", "= threading.Thread(target=get_mes, name='out') t2.start() t3 = threading.Thread(target=get(), name='get') t3.start() t2.join() for i in", "socket, threading server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) host = \"127.0.0.1\" port = 9090 server.bind((host,", "end = '\\n') break print(indata.decode('utf-8')) for i in clients: if i != 
client:", "threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i) t1 = threading.Thread(target=send_mes, name='input') t1.start() t2 = threading.Thread(target=get_mes, name='out')", "clients.remove(client) end.remove(client) print( f'Сервер отключен: количество клиентов: {len (clients)}', end = '\\n') break", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) host = \"127.0.0.1\" port = 9090 server.bind((host, port)) server.listen(5) clients =", "f'Сервер отключен: количество клиентов: {len (clients)}', end = '\\n') break print(indata.decode('utf-8')) for i", "if i in end: continue index = threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i) t1 =", "= list() end = list() def get(): while True: client, addr = server.accept()", "подключен через {addr}: количество клиентов: {len (clients)}', end = '\\n') def recv_data(client): while", "while True: print('') outdata = input('') print() for client in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)'))", "= list() def get(): while True: client, addr = server.accept() clients.append(client) print(f'сервер подключен", "print() for client in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes(): while True: for i", "t2 = threading.Thread(target=get_mes, name='out') t2.start() t3 = threading.Thread(target=get(), name='get') t3.start() t2.join() for i", "= server.accept() clients.append(client) print(f'сервер подключен через {addr}: количество клиентов: {len (clients)}', end =", "def send_mes(): while True: print('') outdata = input('') print() for client in clients:", "def get(): while True: client, addr = server.accept() clients.append(client) print(f'сервер подключен через {addr}:", "True: try: indata = client.recv(1024) except Exception: clients.remove(client) end.remove(client) print( f'Сервер отключен: количество", "end = list() def get(): while True: client, addr = server.accept() clients.append(client) print(f'сервер", "{len (clients)}', end = 
'\\n') break print(indata.decode('utf-8')) for i in clients: if i", "{addr}: количество клиентов: {len (clients)}', end = '\\n') def recv_data(client): while True: try:", "(clients)}', end = '\\n') break print(indata.decode('utf-8')) for i in clients: if i !=", "= 9090 server.bind((host, port)) server.listen(5) clients = list() end = list() def get():", "print(f'сервер подключен через {addr}: количество клиентов: {len (clients)}', end = '\\n') def recv_data(client):", "clients: if i != client: i.send(indata) def send_mes(): while True: print('') outdata =", "True: for i in clients: if i in end: continue index = threading.Thread(target=recv_data,", "index = threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i) t1 = threading.Thread(target=send_mes, name='input') t1.start() t2 =", "server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) host = \"127.0.0.1\" port = 9090 server.bind((host, port)) server.listen(5)", "end = '\\n') def recv_data(client): while True: try: indata = client.recv(1024) except Exception:", "количество клиентов: {len (clients)}', end = '\\n') break print(indata.decode('utf-8')) for i in clients:", "if i != client: i.send(indata) def send_mes(): while True: print('') outdata = input('')", "in clients: if i in end: continue index = threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i)", "outdata = input('') print() for client in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes(): while", "(clients)}', end = '\\n') def recv_data(client): while True: try: indata = client.recv(1024) except", "9090 server.bind((host, port)) server.listen(5) clients = list() end = list() def get(): while", "i in clients: if i in end: continue index = threading.Thread(target=recv_data, args=(i,)) index.start()", "= '\\n') break print(indata.decode('utf-8')) for i in clients: if i != client: i.send(indata)", "addr = server.accept() clients.append(client) print(f'сервер подключен через {addr}: количество 
клиентов: {len (clients)}', end", "host = \"127.0.0.1\" port = 9090 server.bind((host, port)) server.listen(5) clients = list() end", "Exception: clients.remove(client) end.remove(client) print( f'Сервер отключен: количество клиентов: {len (clients)}', end = '\\n')", "client, addr = server.accept() clients.append(client) print(f'сервер подключен через {addr}: количество клиентов: {len (clients)}',", "for i in clients: if i != client: i.send(indata) def send_mes(): while True:", "get_mes(): while True: for i in clients: if i in end: continue index", "input('') print() for client in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes(): while True: for", "port)) server.listen(5) clients = list() end = list() def get(): while True: client,", "while True: client, addr = server.accept() clients.append(client) print(f'сервер подключен через {addr}: количество клиентов:", "True: client, addr = server.accept() clients.append(client) print(f'сервер подключен через {addr}: количество клиентов: {len", "i in clients: if i != client: i.send(indata) def send_mes(): while True: print('')", "{outdata}\".encode('utf-8)')) def get_mes(): while True: for i in clients: if i in end:", "args=(i,)) index.start() end.append(i) t1 = threading.Thread(target=send_mes, name='input') t1.start() t2 = threading.Thread(target=get_mes, name='out') t2.start()", "threading.Thread(target=send_mes, name='input') t1.start() t2 = threading.Thread(target=get_mes, name='out') t2.start() t3 = threading.Thread(target=get(), name='get') t3.start()", "= threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i) t1 = threading.Thread(target=send_mes, name='input') t1.start() t2 = threading.Thread(target=get_mes,", "отключен: количество клиентов: {len (clients)}', end = '\\n') break print(indata.decode('utf-8')) for i in", "clients = list() end = list() def get(): while True: client, addr =", "threading.Thread(target=get_mes, name='out') t2.start() t3 = 
threading.Thread(target=get(), name='get') t3.start() t2.join() for i in clients:", "t1 = threading.Thread(target=send_mes, name='input') t1.start() t2 = threading.Thread(target=get_mes, name='out') t2.start() t3 = threading.Thread(target=get(),", "def get_mes(): while True: for i in clients: if i in end: continue", "index.start() end.append(i) t1 = threading.Thread(target=send_mes, name='input') t1.start() t2 = threading.Thread(target=get_mes, name='out') t2.start() t3", "print('') outdata = input('') print() for client in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes():", "continue index = threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i) t1 = threading.Thread(target=send_mes, name='input') t1.start() t2", "client.recv(1024) except Exception: clients.remove(client) end.remove(client) print( f'Сервер отключен: количество клиентов: {len (clients)}', end", "client in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes(): while True: for i in clients:", "!= client: i.send(indata) def send_mes(): while True: print('') outdata = input('') print() for", "\"127.0.0.1\" port = 9090 server.bind((host, port)) server.listen(5) clients = list() end = list()", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) host = \"127.0.0.1\" port = 9090 server.bind((host, port)) server.listen(5) clients", "in end: continue index = threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i) t1 = threading.Thread(target=send_mes, name='input')", "= \"127.0.0.1\" port = 9090 server.bind((host, port)) server.listen(5) clients = list() end =", "name='input') t1.start() t2 = threading.Thread(target=get_mes, name='out') t2.start() t3 = threading.Thread(target=get(), name='get') t3.start() t2.join()", "port = 9090 server.bind((host, port)) server.listen(5) clients = list() end = list() def", "print(indata.decode('utf-8')) for i in clients: if i != client: i.send(indata) def send_mes(): while", "def 
recv_data(client): while True: try: indata = client.recv(1024) except Exception: clients.remove(client) end.remove(client) print(", "threading server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) host = \"127.0.0.1\" port = 9090 server.bind((host, port))", "i != client: i.send(indata) def send_mes(): while True: print('') outdata = input('') print()", "{len (clients)}', end = '\\n') def recv_data(client): while True: try: indata = client.recv(1024)", "'\\n') break print(indata.decode('utf-8')) for i in clients: if i != client: i.send(indata) def", "indata = client.recv(1024) except Exception: clients.remove(client) end.remove(client) print( f'Сервер отключен: количество клиентов: {len", "break print(indata.decode('utf-8')) for i in clients: if i != client: i.send(indata) def send_mes():", "i.send(indata) def send_mes(): while True: print('') outdata = input('') print() for client in", "import socket, threading server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) host = \"127.0.0.1\" port = 9090", "get(): while True: client, addr = server.accept() clients.append(client) print(f'сервер подключен через {addr}: количество", "end: continue index = threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i) t1 = threading.Thread(target=send_mes, name='input') t1.start()", "server.bind((host, port)) server.listen(5) clients = list() end = list() def get(): while True:", "list() def get(): while True: client, addr = server.accept() clients.append(client) print(f'сервер подключен через", "clients.append(client) print(f'сервер подключен через {addr}: количество клиентов: {len (clients)}', end = '\\n') def", "True: print('') outdata = input('') print() for client in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def", "= input('') print() for client in clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes(): while True:", "client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes(): while True: for i in 
clients: if i in", "in clients: if i != client: i.send(indata) def send_mes(): while True: print('') outdata", "end.remove(client) print( f'Сервер отключен: количество клиентов: {len (clients)}', end = '\\n') break print(indata.decode('utf-8'))", "= client.recv(1024) except Exception: clients.remove(client) end.remove(client) print( f'Сервер отключен: количество клиентов: {len (clients)}',", "name='out') t2.start() t3 = threading.Thread(target=get(), name='get') t3.start() t2.join() for i in clients: i.close()", "через {addr}: количество клиентов: {len (clients)}', end = '\\n') def recv_data(client): while True:", "= threading.Thread(target=send_mes, name='input') t1.start() t2 = threading.Thread(target=get_mes, name='out') t2.start() t3 = threading.Thread(target=get(), name='get')", "list() end = list() def get(): while True: client, addr = server.accept() clients.append(client)", "print( f'Сервер отключен: количество клиентов: {len (clients)}', end = '\\n') break print(indata.decode('utf-8')) for", "while True: try: indata = client.recv(1024) except Exception: clients.remove(client) end.remove(client) print( f'Сервер отключен:", "clients: if i in end: continue index = threading.Thread(target=recv_data, args=(i,)) index.start() end.append(i) t1", "socket.SOCK_STREAM) host = \"127.0.0.1\" port = 9090 server.bind((host, port)) server.listen(5) clients = list()", "clients: client.send(f\"Сервер: {outdata}\".encode('utf-8)')) def get_mes(): while True: for i in clients: if i", "'\\n') def recv_data(client): while True: try: indata = client.recv(1024) except Exception: clients.remove(client) end.remove(client)", "количество клиентов: {len (clients)}', end = '\\n') def recv_data(client): while True: try: indata", "try: indata = client.recv(1024) except Exception: clients.remove(client) end.remove(client) print( f'Сервер отключен: количество клиентов:", "клиентов: {len (clients)}', end = '\\n') break print(indata.decode('utf-8')) for i in clients: if", "client: 
i.send(indata) def send_mes(): while True: print('') outdata = input('') print() for client" ]
[ "Linear(14 * 14, 28 * 28) def encode(self, x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x", "import nn from torch import cuda import torch.nn.functional as F from torchvision import", "print('Model trained and saved') else: # Load net.load_state_dict(T.load(path)) # Test dataiter = iter(loader)", "# Hyper params latent_size = 10 hidden_size = 256 epochs = 3 batch_size", "MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1, 800) x = F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x)) return x", "device device = T.device('cuda:0' if cuda.is_available() else 'cpu') # Dataset trans = transforms.ToTensor()", "torchvision from torchvision.datasets import MNIST from torch.nn import ReLU, Linear, Sigmoid, Conv2d, ConvTranspose2d,", "2, 0))) plt.show() class Net(nn.Module): def __init__(self, hidden_size, latent_size): super().__init__() self.latent_size = latent_size", "latent_size) net.to(device) if train_or_test == 'train': # Load if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded')", "device = T.device('cuda:0' if cuda.is_available() else 'cpu') # Dataset trans = transforms.ToTensor() dataset", "plt import torch as T from torch import nn from torch import cuda", "ConvTranspose2d(1, 1, 2) self.decodeFC2 = Linear(14 * 14, 28 * 28) def encode(self,", "models_dir + '/deep_autoencoder' # Training device device = T.device('cuda:0' if cuda.is_available() else 'cpu')", "{e + 1:2d}, Batch {i + 1:5d}, Loss {avg_loss / print_freq:.3f}') avg_loss =", "forward(self, x): return self.decode(self.encode(x)) # Hyper params latent_size = 10 hidden_size = 256", "= x.view(-1, 1, 13, 13) x = F.relu(self.decodeConv1(x)) x = x.view(-1, 14 *", "dataiter = iter(loader) images, _ = dataiter.next() # Show ground truth gridshow(torchvision.utils.make_grid(images)) #", "# Show ground truth gridshow(torchvision.utils.make_grid(images)) # Show predictions with T.no_grad(): preds = T.cat([net(images[i].view(1,", "{i + 1:5d}, Loss {avg_loss / 
print_freq:.3f}') avg_loss = 0.0 # Save T.save(net.state_dict(),", "net(x) # Back prop loss = criterion(y, x) loss.backward() optim.step() avg_loss += loss.item()", "Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d import PIL.Image as im from utils import dataset_dir, models_dir", "= Linear(self.latent_size, 13 * 13) self.decodeConv1 = ConvTranspose2d(1, 1, 2) self.decodeFC2 = Linear(14", "print_freq == print_freq - 1: print(f'Epoch {e + 1:2d}, Batch {i + 1:5d},", ".0002 train_or_test = 'test' path = models_dir + '/deep_autoencoder' # Training device device", "gridshow(img): npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0))) plt.show() class Net(nn.Module): def __init__(self,", "trained and saved') else: # Load net.load_state_dict(T.load(path)) # Test dataiter = iter(loader) images,", "10 learning_rate = .0002 train_or_test = 'test' path = models_dir + '/deep_autoencoder' #", "self.decodeConv1 = ConvTranspose2d(1, 1, 2) self.decodeFC2 = Linear(14 * 14, 28 * 28)", "# To compress data : net.encode(data) # To decompress data : net.decode(data) #", "Only inputs (no labels) inputs, _ = data # Zero the parameter gradients", "predictions with T.no_grad(): preds = T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1, 1, 28, 28).cpu() for", "self.encodeFC2 = Linear(hidden_size, self.latent_size) self.decodeFC1 = Linear(self.latent_size, 13 * 13) self.decodeConv1 = ConvTranspose2d(1,", "def forward(self, x): return self.decode(self.encode(x)) # Hyper params latent_size = 10 hidden_size =", "betas=(.9, .999)) criterion = nn.MSELoss() for e in range(epochs): avg_loss = 0 for", "+ 1:5d}, Loss {avg_loss / print_freq:.3f}') avg_loss = 0.0 # Save T.save(net.state_dict(), path)", "matplotlib # Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress data : net.encode(data) # To", "+ 1:2d}, Batch {i + 1:5d}, Loss {avg_loss / print_freq:.3f}') avg_loss = 0.0", "print('Model loaded') # Train optim = T.optim.Adam(net.parameters(), 
lr=learning_rate, betas=(.9, .999)) criterion = nn.MSELoss()", "+ '/deep_autoencoder' # Training device device = T.device('cuda:0' if cuda.is_available() else 'cpu') #", "# Show in matplotlib def gridshow(img): npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0)))", "dataset_dir, models_dir # Displays an image (1 dim tensor) # t has values", "avg_loss += loss.item() # Stats print_freq = 100 if i % print_freq ==", "transforms.ToPILImage()(t).show() # Show in matplotlib def gridshow(img): npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2,", "14) x = T.sigmoid(self.decodeFC2(x)) x = x.view(-1, 1, 28, 28) return x def", "loss.backward() optim.step() avg_loss += loss.item() # Stats print_freq = 100 if i %", "28 * 28) def encode(self, x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x", "import torchvision from torchvision.datasets import MNIST from torch.nn import ReLU, Linear, Sigmoid, Conv2d,", "14 * 14) x = T.sigmoid(self.decodeFC2(x)) x = x.view(-1, 1, 28, 28) return", "800) x = F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x)) return x def decode(self, x): x", "net = Net(hidden_size, latent_size) net.to(device) if train_or_test == 'train': # Load if os.path.exists(path):", "Conv2d(1, 16, 4) self.encodeConv2 = Conv2d(16, 32, 2) self.encodeFC1 = Linear(800, hidden_size) self.encodeFC2", "as F from torchvision import transforms import torchvision from torchvision.datasets import MNIST from", "Linear(800, hidden_size) self.encodeFC2 = Linear(hidden_size, self.latent_size) self.decodeFC1 = Linear(self.latent_size, 13 * 13) self.decodeConv1", "dataiter.next() # Show ground truth gridshow(torchvision.utils.make_grid(images)) # Show predictions with T.no_grad(): preds =", "download=True, transform=trans) loader = T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0) # Model net = Net(hidden_size,", "= data # Zero the parameter gradients optim.zero_grad() # Predictions x = 
inputs.to(device)", "28, 28).to(device)).view(1, 1, 28, 28).cpu() for i in range(batch_size)]) preds = T.tensor(preds) gridshow(torchvision.utils.make_grid(preds))", "Test dataiter = iter(loader) images, _ = dataiter.next() # Show ground truth gridshow(torchvision.utils.make_grid(images))", "decode(self, x): x = F.relu(self.decodeFC1(x)) x = x.view(-1, 1, 13, 13) x =", "MNIST(root=dataset_dir, train=True, download=True, transform=trans) loader = T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0) # Model net", "1:2d}, Batch {i + 1:5d}, Loss {avg_loss / print_freq:.3f}') avg_loss = 0.0 #", "# Zero the parameter gradients optim.zero_grad() # Predictions x = inputs.to(device) y =", "loaded') # Train optim = T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9, .999)) criterion = nn.MSELoss() for", "= Conv2d(1, 16, 4) self.encodeConv2 = Conv2d(16, 32, 2) self.encodeFC1 = Linear(800, hidden_size)", "npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0))) plt.show() class Net(nn.Module): def __init__(self, hidden_size,", "convolutional layers # Dataset : MNIST # Requires : PIL, matplotlib # Inspired", "Net(hidden_size, latent_size) net.to(device) if train_or_test == 'train': # Load if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model", "x): return self.decode(self.encode(x)) # Hyper params latent_size = 10 hidden_size = 256 epochs", "data : net(data) import os import numpy as np import matplotlib.pyplot as plt", "inputs (no labels) inputs, _ = data # Zero the parameter gradients optim.zero_grad()", "shuffle=True, num_workers=0) # Model net = Net(hidden_size, latent_size) net.to(device) if train_or_test == 'train':", "2) self.decodeFC2 = Linear(14 * 14, 28 * 28) def encode(self, x): x", "dataset = MNIST(root=dataset_dir, train=True, download=True, transform=trans) loader = T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0) #", "# Dataset : MNIST # Requires : PIL, matplotlib 
# Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html", "nn.MSELoss() for e in range(epochs): avg_loss = 0 for i, data in enumerate(loader,", "import cuda import torch.nn.functional as F from torchvision import transforms import torchvision from", "= T.sigmoid(self.encodeFC2(x)) return x def decode(self, x): x = F.relu(self.decodeFC1(x)) x = x.view(-1,", "* 14, 28 * 28) def encode(self, x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x =", "= T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9, .999)) criterion = nn.MSELoss() for e in range(epochs): avg_loss", "= Conv2d(16, 32, 2) self.encodeFC1 = Linear(800, hidden_size) self.encodeFC2 = Linear(hidden_size, self.latent_size) self.decodeFC1", "an image (1 dim tensor) # t has values in [0, 1] def", "torch import nn from torch import cuda import torch.nn.functional as F from torchvision", "x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1, 800) x =", "= Linear(hidden_size, self.latent_size) self.decodeFC1 = Linear(self.latent_size, 13 * 13) self.decodeConv1 = ConvTranspose2d(1, 1,", "T.sigmoid(self.decodeFC2(x)) x = x.view(-1, 1, 28, 28) return x def forward(self, x): return", "from torch.nn import ReLU, Linear, Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d import PIL.Image as im", "in matplotlib def gridshow(img): npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0))) plt.show() class", "= 10 learning_rate = .0002 train_or_test = 'test' path = models_dir + '/deep_autoencoder'", "truth gridshow(torchvision.utils.make_grid(images)) # Show predictions with T.no_grad(): preds = T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1,", "* 28) def encode(self, x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x =", "import torch.nn.functional as F from torchvision import transforms import torchvision from torchvision.datasets import", "plt.show() class Net(nn.Module): def 
__init__(self, hidden_size, latent_size): super().__init__() self.latent_size = latent_size self.encodeConv1 =", "else: # Load net.load_state_dict(T.load(path)) # Test dataiter = iter(loader) images, _ = dataiter.next()", "0 for i, data in enumerate(loader, 0): # Only inputs (no labels) inputs,", "with T.no_grad(): preds = T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1, 1, 28, 28).cpu() for i", "Load if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded') # Train optim = T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9,", "if train_or_test == 'train': # Load if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded') # Train", "'train': # Load if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded') # Train optim = T.optim.Adam(net.parameters(),", "torch.nn.functional as F from torchvision import transforms import torchvision from torchvision.datasets import MNIST", "img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0))) plt.show() class Net(nn.Module): def __init__(self, hidden_size, latent_size): super().__init__()", "T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1, 1, 28, 28).cpu() for i in range(batch_size)]) preds =", "im from utils import dataset_dir, models_dir # Displays an image (1 dim tensor)", "# Only inputs (no labels) inputs, _ = data # Zero the parameter", "= models_dir + '/deep_autoencoder' # Training device device = T.device('cuda:0' if cuda.is_available() else", "x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1, 800) x = F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x))", "= MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1, 800) x = F.relu(self.encodeFC1(x)) x", "x = T.sigmoid(self.decodeFC2(x)) x = x.view(-1, 1, 28, 28) return x def forward(self,", "Save T.save(net.state_dict(), path) print('Model trained and saved') else: # Load net.load_state_dict(T.load(path)) # Test", 
"parameter gradients optim.zero_grad() # Predictions x = inputs.to(device) y = net(x) # Back", "preds = T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1, 1, 28, 28).cpu() for i in range(batch_size)])", "= Linear(800, hidden_size) self.encodeFC2 = Linear(hidden_size, self.latent_size) self.decodeFC1 = Linear(self.latent_size, 13 * 13)", "imshow(t): transforms.ToPILImage()(t).show() # Show in matplotlib def gridshow(img): npimg = img.numpy() plt.imshow(np.transpose(npimg, (1,", "13 * 13) self.decodeConv1 = ConvTranspose2d(1, 1, 2) self.decodeFC2 = Linear(14 * 14,", "optim.zero_grad() # Predictions x = inputs.to(device) y = net(x) # Back prop loss", "x = x.view(-1, 800) x = F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x)) return x def", "= x.view(-1, 800) x = F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x)) return x def decode(self,", "train_or_test == 'train': # Load if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded') # Train optim", "28) return x def forward(self, x): return self.decode(self.encode(x)) # Hyper params latent_size =", "# Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress data : net.encode(data) # To decompress", "== print_freq - 1: print(f'Epoch {e + 1:2d}, Batch {i + 1:5d}, Loss", "# To mutate data : net(data) import os import numpy as np import", "16, 4) self.encodeConv2 = Conv2d(16, 32, 2) self.encodeFC1 = Linear(800, hidden_size) self.encodeFC2 =", "1, 28, 28).to(device)).view(1, 1, 28, 28).cpu() for i in range(batch_size)]) preds = T.tensor(preds)", "import os import numpy as np import matplotlib.pyplot as plt import torch as", "cuda import torch.nn.functional as F from torchvision import transforms import torchvision from torchvision.datasets", "= MNIST(root=dataset_dir, train=True, download=True, transform=trans) loader = T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0) # Model", "data # Zero the parameter 
gradients optim.zero_grad() # Predictions x = inputs.to(device) y", "MNIST from torch.nn import ReLU, Linear, Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d import PIL.Image as", ": MNIST # Requires : PIL, matplotlib # Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To", "as plt import torch as T from torch import nn from torch import", "mutate data : net(data) import os import numpy as np import matplotlib.pyplot as", "14, 28 * 28) def encode(self, x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x)))", "optim = T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9, .999)) criterion = nn.MSELoss() for e in range(epochs):", "Net(nn.Module): def __init__(self, hidden_size, latent_size): super().__init__() self.latent_size = latent_size self.encodeConv1 = Conv2d(1, 16,", "13, 13) x = F.relu(self.decodeConv1(x)) x = x.view(-1, 14 * 14) x =", "F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x)) return x def decode(self, x): x = F.relu(self.decodeFC1(x)) x", "% print_freq == print_freq - 1: print(f'Epoch {e + 1:2d}, Batch {i +", "ReLU, Linear, Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d import PIL.Image as im from utils import", "torch.nn import ReLU, Linear, Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d import PIL.Image as im from", "range(epochs): avg_loss = 0 for i, data in enumerate(loader, 0): # Only inputs", "Training device device = T.device('cuda:0' if cuda.is_available() else 'cpu') # Dataset trans =", "x = inputs.to(device) y = net(x) # Back prop loss = criterion(y, x)", "[0, 1] def imshow(t): transforms.ToPILImage()(t).show() # Show in matplotlib def gridshow(img): npimg =", "13) self.decodeConv1 = ConvTranspose2d(1, 1, 2) self.decodeFC2 = Linear(14 * 14, 28 *", "i, data in enumerate(loader, 0): # Only inputs (no labels) inputs, _ =", "super().__init__() self.latent_size = latent_size self.encodeConv1 = Conv2d(1, 16, 4) self.encodeConv2 = Conv2d(16, 32,", "0): # Only inputs (no 
labels) inputs, _ = data # Zero the", "self.encodeFC1 = Linear(800, hidden_size) self.encodeFC2 = Linear(hidden_size, self.latent_size) self.decodeFC1 = Linear(self.latent_size, 13 *", "# Displays an image (1 dim tensor) # t has values in [0,", "net.to(device) if train_or_test == 'train': # Load if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded') #", "x = F.relu(self.decodeFC1(x)) x = x.view(-1, 1, 13, 13) x = F.relu(self.decodeConv1(x)) x", "x = T.sigmoid(self.encodeFC2(x)) return x def decode(self, x): x = F.relu(self.decodeFC1(x)) x =", "train=True, download=True, transform=trans) loader = T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0) # Model net =", "# Requires : PIL, matplotlib # Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress data", "models_dir # Displays an image (1 dim tensor) # t has values in", "_ = dataiter.next() # Show ground truth gridshow(torchvision.utils.make_grid(images)) # Show predictions with T.no_grad():", "= 100 if i % print_freq == print_freq - 1: print(f'Epoch {e +", "MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1, 800) x = F.relu(self.encodeFC1(x)) x =", "def imshow(t): transforms.ToPILImage()(t).show() # Show in matplotlib def gridshow(img): npimg = img.numpy() plt.imshow(np.transpose(npimg,", "To mutate data : net(data) import os import numpy as np import matplotlib.pyplot", ".999)) criterion = nn.MSELoss() for e in range(epochs): avg_loss = 0 for i,", "x = x.view(-1, 1, 28, 28) return x def forward(self, x): return self.decode(self.encode(x))", "avg_loss = 0 for i, data in enumerate(loader, 0): # Only inputs (no", "x.view(-1, 1, 28, 28) return x def forward(self, x): return self.decode(self.encode(x)) # Hyper", "ConvTranspose2d, MaxPool2d import PIL.Image as im from utils import dataset_dir, models_dir # Displays", "Zero the parameter gradients optim.zero_grad() # 
Predictions x = inputs.to(device) y = net(x)", "nn from torch import cuda import torch.nn.functional as F from torchvision import transforms", "for e in range(epochs): avg_loss = 0 for i, data in enumerate(loader, 0):", "# t has values in [0, 1] def imshow(t): transforms.ToPILImage()(t).show() # Show in", "if cuda.is_available() else 'cpu') # Dataset trans = transforms.ToTensor() dataset = MNIST(root=dataset_dir, train=True,", "= ConvTranspose2d(1, 1, 2) self.decodeFC2 = Linear(14 * 14, 28 * 28) def", "prop loss = criterion(y, x) loss.backward() optim.step() avg_loss += loss.item() # Stats print_freq", "matplotlib def gridshow(img): npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0))) plt.show() class Net(nn.Module):", "decompress data : net.decode(data) # To mutate data : net(data) import os import", "ground truth gridshow(torchvision.utils.make_grid(images)) # Show predictions with T.no_grad(): preds = T.cat([net(images[i].view(1, 1, 28,", "Linear, Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d import PIL.Image as im from utils import dataset_dir,", "matplotlib.pyplot as plt import torch as T from torch import nn from torch", "= .0002 train_or_test = 'test' path = models_dir + '/deep_autoencoder' # Training device", "from torch import nn from torch import cuda import torch.nn.functional as F from", "values in [0, 1] def imshow(t): transforms.ToPILImage()(t).show() # Show in matplotlib def gridshow(img):", "Stats print_freq = 100 if i % print_freq == print_freq - 1: print(f'Epoch", "data : net.decode(data) # To mutate data : net(data) import os import numpy", "def decode(self, x): x = F.relu(self.decodeFC1(x)) x = x.view(-1, 1, 13, 13) x", "import numpy as np import matplotlib.pyplot as plt import torch as T from", "Show ground truth gridshow(torchvision.utils.make_grid(images)) # Show predictions with T.no_grad(): preds = T.cat([net(images[i].view(1, 1,", "i % print_freq == print_freq - 1: print(f'Epoch {e + 1:2d}, Batch {i", "self.decode(self.encode(x)) # 
Hyper params latent_size = 10 hidden_size = 256 epochs = 3", "32, 2) self.encodeFC1 = Linear(800, hidden_size) self.encodeFC2 = Linear(hidden_size, self.latent_size) self.decodeFC1 = Linear(self.latent_size,", "Model net = Net(hidden_size, latent_size) net.to(device) if train_or_test == 'train': # Load if", "1, 28, 28) return x def forward(self, x): return self.decode(self.encode(x)) # Hyper params", "13) x = F.relu(self.decodeConv1(x)) x = x.view(-1, 14 * 14) x = T.sigmoid(self.decodeFC2(x))", "# Test dataiter = iter(loader) images, _ = dataiter.next() # Show ground truth", "== 'train': # Load if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded') # Train optim =", "PIL.Image as im from utils import dataset_dir, models_dir # Displays an image (1", "tensor) # t has values in [0, 1] def imshow(t): transforms.ToPILImage()(t).show() # Show", "print_freq - 1: print(f'Epoch {e + 1:2d}, Batch {i + 1:5d}, Loss {avg_loss", "# Predictions x = inputs.to(device) y = net(x) # Back prop loss =", "trans = transforms.ToTensor() dataset = MNIST(root=dataset_dir, train=True, download=True, transform=trans) loader = T.utils.data.DataLoader(dataset, batch_size=batch_size,", "net.encode(data) # To decompress data : net.decode(data) # To mutate data : net(data)", "Predictions x = inputs.to(device) y = net(x) # Back prop loss = criterion(y,", "x def decode(self, x): x = F.relu(self.decodeFC1(x)) x = x.view(-1, 1, 13, 13)", "To decompress data : net.decode(data) # To mutate data : net(data) import os", "data : net.encode(data) # To decompress data : net.decode(data) # To mutate data", "F from torchvision import transforms import torchvision from torchvision.datasets import MNIST from torch.nn", "def gridshow(img): npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0))) plt.show() class Net(nn.Module): def", "Linear(hidden_size, self.latent_size) self.decodeFC1 = Linear(self.latent_size, 13 * 13) self.decodeConv1 = ConvTranspose2d(1, 1, 2)", "+= 
loss.item() # Stats print_freq = 100 if i % print_freq == print_freq", "epochs = 3 batch_size = 10 learning_rate = .0002 train_or_test = 'test' path", "2) self.encodeFC1 = Linear(800, hidden_size) self.encodeFC2 = Linear(hidden_size, self.latent_size) self.decodeFC1 = Linear(self.latent_size, 13", "Batch {i + 1:5d}, Loss {avg_loss / print_freq:.3f}') avg_loss = 0.0 # Save", "(1 dim tensor) # t has values in [0, 1] def imshow(t): transforms.ToPILImage()(t).show()", "# Back prop loss = criterion(y, x) loss.backward() optim.step() avg_loss += loss.item() #", "0))) plt.show() class Net(nn.Module): def __init__(self, hidden_size, latent_size): super().__init__() self.latent_size = latent_size self.encodeConv1", "layers # Dataset : MNIST # Requires : PIL, matplotlib # Inspired by", "latent_size): super().__init__() self.latent_size = latent_size self.encodeConv1 = Conv2d(1, 16, 4) self.encodeConv2 = Conv2d(16,", "lr=learning_rate, betas=(.9, .999)) criterion = nn.MSELoss() for e in range(epochs): avg_loss = 0", "3 batch_size = 10 learning_rate = .0002 train_or_test = 'test' path = models_dir", "T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9, .999)) criterion = nn.MSELoss() for e in range(epochs): avg_loss =", "self.encodeConv2 = Conv2d(16, 32, 2) self.encodeFC1 = Linear(800, hidden_size) self.encodeFC2 = Linear(hidden_size, self.latent_size)", "print(f'Epoch {e + 1:2d}, Batch {i + 1:5d}, Loss {avg_loss / print_freq:.3f}') avg_loss", ": net.decode(data) # To mutate data : net(data) import os import numpy as", "x def forward(self, x): return self.decode(self.encode(x)) # Hyper params latent_size = 10 hidden_size", "in range(epochs): avg_loss = 0 for i, data in enumerate(loader, 0): # Only", "return x def forward(self, x): return self.decode(self.encode(x)) # Hyper params latent_size = 10", "= nn.MSELoss() for e in range(epochs): avg_loss = 0 for i, data in", "enumerate(loader, 0): # Only inputs (no labels) inputs, _ = data # Zero", "t has values in [0, 1] def 
imshow(t): transforms.ToPILImage()(t).show() # Show in matplotlib", "import matplotlib.pyplot as plt import torch as T from torch import nn from", "images, _ = dataiter.next() # Show ground truth gridshow(torchvision.utils.make_grid(images)) # Show predictions with", "* 14) x = T.sigmoid(self.decodeFC2(x)) x = x.view(-1, 1, 28, 28) return x", "has values in [0, 1] def imshow(t): transforms.ToPILImage()(t).show() # Show in matplotlib def", "T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0) # Model net = Net(hidden_size, latent_size) net.to(device) if train_or_test", "= 3 batch_size = 10 learning_rate = .0002 train_or_test = 'test' path =", "= img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0))) plt.show() class Net(nn.Module): def __init__(self, hidden_size, latent_size):", "import MNIST from torch.nn import ReLU, Linear, Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d import PIL.Image", "= transforms.ToTensor() dataset = MNIST(root=dataset_dir, train=True, download=True, transform=trans) loader = T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True,", "avg_loss = 0.0 # Save T.save(net.state_dict(), path) print('Model trained and saved') else: #", "= MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1, 800) x = F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x)) return", "from torchvision.datasets import MNIST from torch.nn import ReLU, Linear, Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d", "= Net(hidden_size, latent_size) net.to(device) if train_or_test == 'train': # Load if os.path.exists(path): net.load_state_dict(T.load(path))", "utils import dataset_dir, models_dir # Displays an image (1 dim tensor) # t", "plt.imshow(np.transpose(npimg, (1, 2, 0))) plt.show() class Net(nn.Module): def __init__(self, hidden_size, latent_size): super().__init__() self.latent_size", "1: print(f'Epoch {e + 1:2d}, Batch {i + 1:5d}, Loss {avg_loss / print_freq:.3f}')", "/ print_freq:.3f}') avg_loss = 0.0 # Save 
T.save(net.state_dict(), path) print('Model trained and saved')", "data in enumerate(loader, 0): # Only inputs (no labels) inputs, _ = data", "10 hidden_size = 256 epochs = 3 batch_size = 10 learning_rate = .0002", "= criterion(y, x) loss.backward() optim.step() avg_loss += loss.item() # Stats print_freq = 100", "criterion = nn.MSELoss() for e in range(epochs): avg_loss = 0 for i, data", "by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress data : net.encode(data) # To decompress data :", "Dataset : MNIST # Requires : PIL, matplotlib # Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html #", "= F.relu(self.decodeConv1(x)) x = x.view(-1, 14 * 14) x = T.sigmoid(self.decodeFC2(x)) x =", "F.relu(self.decodeConv1(x)) x = x.view(-1, 14 * 14) x = T.sigmoid(self.decodeFC2(x)) x = x.view(-1,", "1, 13, 13) x = F.relu(self.decodeConv1(x)) x = x.view(-1, 14 * 14) x", "path) print('Model trained and saved') else: # Load net.load_state_dict(T.load(path)) # Test dataiter =", "learning_rate = .0002 train_or_test = 'test' path = models_dir + '/deep_autoencoder' # Training", "x): x = F.relu(self.decodeFC1(x)) x = x.view(-1, 1, 13, 13) x = F.relu(self.decodeConv1(x))", "Loss {avg_loss / print_freq:.3f}') avg_loss = 0.0 # Save T.save(net.state_dict(), path) print('Model trained", "= inputs.to(device) y = net(x) # Back prop loss = criterion(y, x) loss.backward()", ": net(data) import os import numpy as np import matplotlib.pyplot as plt import", "MNIST # Requires : PIL, matplotlib # Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress", "= 10 hidden_size = 256 epochs = 3 batch_size = 10 learning_rate =", "1, 2) self.decodeFC2 = Linear(14 * 14, 28 * 28) def encode(self, x):", "= Linear(14 * 14, 28 * 28) def encode(self, x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x)))", "return x def decode(self, x): x = F.relu(self.decodeFC1(x)) x = x.view(-1, 1, 13,", "* 13) self.decodeConv1 = 
ConvTranspose2d(1, 1, 2) self.decodeFC2 = Linear(14 * 14, 28", "dim tensor) # t has values in [0, 1] def imshow(t): transforms.ToPILImage()(t).show() #", "x.view(-1, 1, 13, 13) x = F.relu(self.decodeConv1(x)) x = x.view(-1, 14 * 14)", "def encode(self, x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1, 800)", "# Train optim = T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9, .999)) criterion = nn.MSELoss() for e", "# Load if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded') # Train optim = T.optim.Adam(net.parameters(), lr=learning_rate,", "Conv2d(16, 32, 2) self.encodeFC1 = Linear(800, hidden_size) self.encodeFC2 = Linear(hidden_size, self.latent_size) self.decodeFC1 =", "x = x.view(-1, 1, 13, 13) x = F.relu(self.decodeConv1(x)) x = x.view(-1, 14", "torch import cuda import torch.nn.functional as F from torchvision import transforms import torchvision", "# Model net = Net(hidden_size, latent_size) net.to(device) if train_or_test == 'train': # Load", "x = F.relu(self.decodeConv1(x)) x = x.view(-1, 14 * 14) x = T.sigmoid(self.decodeFC2(x)) x", "if os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded') # Train optim = T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9, .999))", "T.device('cuda:0' if cuda.is_available() else 'cpu') # Dataset trans = transforms.ToTensor() dataset = MNIST(root=dataset_dir,", "hidden_size, latent_size): super().__init__() self.latent_size = latent_size self.encodeConv1 = Conv2d(1, 16, 4) self.encodeConv2 =", "gradients optim.zero_grad() # Predictions x = inputs.to(device) y = net(x) # Back prop", "Conv2d, ConvTranspose2d, MaxPool2d import PIL.Image as im from utils import dataset_dir, models_dir #", "from utils import dataset_dir, models_dir # Displays an image (1 dim tensor) #", "params latent_size = 10 hidden_size = 256 epochs = 3 batch_size = 10", "compress data : net.encode(data) # To decompress data : 
net.decode(data) # To mutate", "- 1: print(f'Epoch {e + 1:2d}, Batch {i + 1:5d}, Loss {avg_loss /", "'/deep_autoencoder' # Training device device = T.device('cuda:0' if cuda.is_available() else 'cpu') # Dataset", "torchvision.datasets import MNIST from torch.nn import ReLU, Linear, Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d import", "hidden_size) self.encodeFC2 = Linear(hidden_size, self.latent_size) self.decodeFC1 = Linear(self.latent_size, 13 * 13) self.decodeConv1 =", "Hyper params latent_size = 10 hidden_size = 256 epochs = 3 batch_size =", "Dataset trans = transforms.ToTensor() dataset = MNIST(root=dataset_dir, train=True, download=True, transform=trans) loader = T.utils.data.DataLoader(dataset,", "To compress data : net.encode(data) # To decompress data : net.decode(data) # To", "saved') else: # Load net.load_state_dict(T.load(path)) # Test dataiter = iter(loader) images, _ =", "y = net(x) # Back prop loss = criterion(y, x) loss.backward() optim.step() avg_loss", "print_freq:.3f}') avg_loss = 0.0 # Save T.save(net.state_dict(), path) print('Model trained and saved') else:", "cuda.is_available() else 'cpu') # Dataset trans = transforms.ToTensor() dataset = MNIST(root=dataset_dir, train=True, download=True,", "28) def encode(self, x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1,", "self.latent_size) self.decodeFC1 = Linear(self.latent_size, 13 * 13) self.decodeConv1 = ConvTranspose2d(1, 1, 2) self.decodeFC2", "loss = criterion(y, x) loss.backward() optim.step() avg_loss += loss.item() # Stats print_freq =", "x) loss.backward() optim.step() avg_loss += loss.item() # Stats print_freq = 100 if i", "Load net.load_state_dict(T.load(path)) # Test dataiter = iter(loader) images, _ = dataiter.next() # Show", "import transforms import torchvision from torchvision.datasets import MNIST from torch.nn import ReLU, Linear,", "self.decodeFC2 = Linear(14 * 14, 28 * 28) def encode(self, x): x =", "def 
__init__(self, hidden_size, latent_size): super().__init__() self.latent_size = latent_size self.encodeConv1 = Conv2d(1, 16, 4)", "= F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x)) return x def decode(self, x): x = F.relu(self.decodeFC1(x))", "PIL, matplotlib # Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress data : net.encode(data) #", "import dataset_dir, models_dir # Displays an image (1 dim tensor) # t has", "hidden_size = 256 epochs = 3 batch_size = 10 learning_rate = .0002 train_or_test", "# Dataset trans = transforms.ToTensor() dataset = MNIST(root=dataset_dir, train=True, download=True, transform=trans) loader =", "(1, 2, 0))) plt.show() class Net(nn.Module): def __init__(self, hidden_size, latent_size): super().__init__() self.latent_size =", "for i, data in enumerate(loader, 0): # Only inputs (no labels) inputs, _", ": PIL, matplotlib # Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress data : net.encode(data)", "numpy as np import matplotlib.pyplot as plt import torch as T from torch", "__init__(self, hidden_size, latent_size): super().__init__() self.latent_size = latent_size self.encodeConv1 = Conv2d(1, 16, 4) self.encodeConv2", "T.no_grad(): preds = T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1, 1, 28, 28).cpu() for i in", "# Autoencoder using convolutional layers # Dataset : MNIST # Requires : PIL,", "import torch as T from torch import nn from torch import cuda import", "_ = data # Zero the parameter gradients optim.zero_grad() # Predictions x =", "= 'test' path = models_dir + '/deep_autoencoder' # Training device device = T.device('cuda:0'", "x.view(-1, 14 * 14) x = T.sigmoid(self.decodeFC2(x)) x = x.view(-1, 1, 28, 28)", "transforms.ToTensor() dataset = MNIST(root=dataset_dir, train=True, download=True, transform=trans) loader = T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0)", "optim.step() avg_loss += 
loss.item() # Stats print_freq = 100 if i % print_freq", "# Show predictions with T.no_grad(): preds = T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1, 1, 28,", "x.view(-1, 800) x = F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x)) return x def decode(self, x):", "= T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1, 1, 28, 28).cpu() for i in range(batch_size)]) preds", "T.sigmoid(self.encodeFC2(x)) return x def decode(self, x): x = F.relu(self.decodeFC1(x)) x = x.view(-1, 1,", "F.relu(self.decodeFC1(x)) x = x.view(-1, 1, 13, 13) x = F.relu(self.decodeConv1(x)) x = x.view(-1,", "torch as T from torch import nn from torch import cuda import torch.nn.functional", "https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress data : net.encode(data) # To decompress data : net.decode(data)", "= 256 epochs = 3 batch_size = 10 learning_rate = .0002 train_or_test =", "return self.decode(self.encode(x)) # Hyper params latent_size = 10 hidden_size = 256 epochs =", "train_or_test = 'test' path = models_dir + '/deep_autoencoder' # Training device device =", "0.0 # Save T.save(net.state_dict(), path) print('Model trained and saved') else: # Load net.load_state_dict(T.load(path))", "in enumerate(loader, 0): # Only inputs (no labels) inputs, _ = data #", "= iter(loader) images, _ = dataiter.next() # Show ground truth gridshow(torchvision.utils.make_grid(images)) # Show", "import PIL.Image as im from utils import dataset_dir, models_dir # Displays an image", "T from torch import nn from torch import cuda import torch.nn.functional as F", "the parameter gradients optim.zero_grad() # Predictions x = inputs.to(device) y = net(x) #", "28, 28) return x def forward(self, x): return self.decode(self.encode(x)) # Hyper params latent_size", "if i % print_freq == print_freq - 1: print(f'Epoch {e + 1:2d}, Batch", "as T from torch import nn from torch import cuda import torch.nn.functional as", "'test' path = models_dir + '/deep_autoencoder' # 
Training device device = T.device('cuda:0' if", "in [0, 1] def imshow(t): transforms.ToPILImage()(t).show() # Show in matplotlib def gridshow(img): npimg", "torchvision import transforms import torchvision from torchvision.datasets import MNIST from torch.nn import ReLU,", "using convolutional layers # Dataset : MNIST # Requires : PIL, matplotlib #", "= T.device('cuda:0' if cuda.is_available() else 'cpu') # Dataset trans = transforms.ToTensor() dataset =", "self.latent_size = latent_size self.encodeConv1 = Conv2d(1, 16, 4) self.encodeConv2 = Conv2d(16, 32, 2)", "100 if i % print_freq == print_freq - 1: print(f'Epoch {e + 1:2d},", "Show predictions with T.no_grad(): preds = T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1, 1, 28, 28).cpu()", "and saved') else: # Load net.load_state_dict(T.load(path)) # Test dataiter = iter(loader) images, _", "= T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0) # Model net = Net(hidden_size, latent_size) net.to(device) if", "Displays an image (1 dim tensor) # t has values in [0, 1]", "= dataiter.next() # Show ground truth gridshow(torchvision.utils.make_grid(images)) # Show predictions with T.no_grad(): preds", "# Load net.load_state_dict(T.load(path)) # Test dataiter = iter(loader) images, _ = dataiter.next() #", "Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress data : net.encode(data) # To decompress data", "latent_size self.encodeConv1 = Conv2d(1, 16, 4) self.encodeConv2 = Conv2d(16, 32, 2) self.encodeFC1 =", "image (1 dim tensor) # t has values in [0, 1] def imshow(t):", "= T.sigmoid(self.decodeFC2(x)) x = x.view(-1, 1, 28, 28) return x def forward(self, x):", "iter(loader) images, _ = dataiter.next() # Show ground truth gridshow(torchvision.utils.make_grid(images)) # Show predictions", "os import numpy as np import matplotlib.pyplot as plt import torch as T", "e in range(epochs): avg_loss = 0 for i, data in enumerate(loader, 0): #", "loader 
= T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0) # Model net = Net(hidden_size, latent_size) net.to(device)", "transforms import torchvision from torchvision.datasets import MNIST from torch.nn import ReLU, Linear, Sigmoid,", "'cpu') # Dataset trans = transforms.ToTensor() dataset = MNIST(root=dataset_dir, train=True, download=True, transform=trans) loader", "transform=trans) loader = T.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0) # Model net = Net(hidden_size, latent_size)", "x = x.view(-1, 14 * 14) x = T.sigmoid(self.decodeFC2(x)) x = x.view(-1, 1,", "MaxPool2d import PIL.Image as im from utils import dataset_dir, models_dir # Displays an", "x = F.relu(self.encodeFC1(x)) x = T.sigmoid(self.encodeFC2(x)) return x def decode(self, x): x =", "class Net(nn.Module): def __init__(self, hidden_size, latent_size): super().__init__() self.latent_size = latent_size self.encodeConv1 = Conv2d(1,", "T.save(net.state_dict(), path) print('Model trained and saved') else: # Load net.load_state_dict(T.load(path)) # Test dataiter", "1] def imshow(t): transforms.ToPILImage()(t).show() # Show in matplotlib def gridshow(img): npimg = img.numpy()", "(no labels) inputs, _ = data # Zero the parameter gradients optim.zero_grad() #", "batch_size = 10 learning_rate = .0002 train_or_test = 'test' path = models_dir +", "# Stats print_freq = 100 if i % print_freq == print_freq - 1:", "net(data) import os import numpy as np import matplotlib.pyplot as plt import torch", "= F.relu(self.decodeFC1(x)) x = x.view(-1, 1, 13, 13) x = F.relu(self.decodeConv1(x)) x =", "batch_size=batch_size, shuffle=True, num_workers=0) # Model net = Net(hidden_size, latent_size) net.to(device) if train_or_test ==", "self.decodeFC1 = Linear(self.latent_size, 13 * 13) self.decodeConv1 = ConvTranspose2d(1, 1, 2) self.decodeFC2 =", "256 epochs = 3 batch_size = 10 learning_rate = .0002 train_or_test = 'test'", "# Save T.save(net.state_dict(), 
path) print('Model trained and saved') else: # Load net.load_state_dict(T.load(path)) #", "net.load_state_dict(T.load(path)) # Test dataiter = iter(loader) images, _ = dataiter.next() # Show ground", "os.path.exists(path): net.load_state_dict(T.load(path)) print('Model loaded') # Train optim = T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9, .999)) criterion", "= net(x) # Back prop loss = criterion(y, x) loss.backward() optim.step() avg_loss +=", "1:5d}, Loss {avg_loss / print_freq:.3f}') avg_loss = 0.0 # Save T.save(net.state_dict(), path) print('Model", "{avg_loss / print_freq:.3f}') avg_loss = 0.0 # Save T.save(net.state_dict(), path) print('Model trained and", "Autoencoder using convolutional layers # Dataset : MNIST # Requires : PIL, matplotlib", "self.encodeConv1 = Conv2d(1, 16, 4) self.encodeConv2 = Conv2d(16, 32, 2) self.encodeFC1 = Linear(800,", "4) self.encodeConv2 = Conv2d(16, 32, 2) self.encodeFC1 = Linear(800, hidden_size) self.encodeFC2 = Linear(hidden_size,", "Show in matplotlib def gridshow(img): npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0))) plt.show()", "else 'cpu') # Dataset trans = transforms.ToTensor() dataset = MNIST(root=dataset_dir, train=True, download=True, transform=trans)", "gridshow(torchvision.utils.make_grid(images)) # Show predictions with T.no_grad(): preds = T.cat([net(images[i].view(1, 1, 28, 28).to(device)).view(1, 1,", "as im from utils import dataset_dir, models_dir # Displays an image (1 dim", "encode(self, x): x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1, 800) x", "np import matplotlib.pyplot as plt import torch as T from torch import nn", ": net.encode(data) # To decompress data : net.decode(data) # To mutate data :", "Linear(self.latent_size, 13 * 13) self.decodeConv1 = ConvTranspose2d(1, 1, 2) self.decodeFC2 = Linear(14 *", "# To decompress data : net.decode(data) # To mutate data : net(data) import", "loss.item() # Stats print_freq = 
100 if i % print_freq == print_freq -", "inputs, _ = data # Zero the parameter gradients optim.zero_grad() # Predictions x", "from torchvision import transforms import torchvision from torchvision.datasets import MNIST from torch.nn import", "= x.view(-1, 14 * 14) x = T.sigmoid(self.decodeFC2(x)) x = x.view(-1, 1, 28,", "net.decode(data) # To mutate data : net(data) import os import numpy as np", "x = MaxPool2d(2)(F.relu(self.encodeConv1(x))) x = MaxPool2d(2)(F.relu(self.encodeConv2(x))) x = x.view(-1, 800) x = F.relu(self.encodeFC1(x))", "from torch import cuda import torch.nn.functional as F from torchvision import transforms import", "Train optim = T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9, .999)) criterion = nn.MSELoss() for e in", "labels) inputs, _ = data # Zero the parameter gradients optim.zero_grad() # Predictions", "# Training device device = T.device('cuda:0' if cuda.is_available() else 'cpu') # Dataset trans", "num_workers=0) # Model net = Net(hidden_size, latent_size) net.to(device) if train_or_test == 'train': #", "= 0.0 # Save T.save(net.state_dict(), path) print('Model trained and saved') else: # Load", "as np import matplotlib.pyplot as plt import torch as T from torch import", "import ReLU, Linear, Sigmoid, Conv2d, ConvTranspose2d, MaxPool2d import PIL.Image as im from utils", "Requires : PIL, matplotlib # Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html # To compress data :", "inputs.to(device) y = net(x) # Back prop loss = criterion(y, x) loss.backward() optim.step()", "= x.view(-1, 1, 28, 28) return x def forward(self, x): return self.decode(self.encode(x)) #", "Back prop loss = criterion(y, x) loss.backward() optim.step() avg_loss += loss.item() # Stats", "criterion(y, x) loss.backward() optim.step() avg_loss += loss.item() # Stats print_freq = 100 if", "= latent_size self.encodeConv1 = Conv2d(1, 16, 4) self.encodeConv2 = Conv2d(16, 32, 2) self.encodeFC1", "latent_size = 10 hidden_size = 256 
epochs = 3 batch_size = 10 learning_rate", "net.load_state_dict(T.load(path)) print('Model loaded') # Train optim = T.optim.Adam(net.parameters(), lr=learning_rate, betas=(.9, .999)) criterion =", "path = models_dir + '/deep_autoencoder' # Training device device = T.device('cuda:0' if cuda.is_available()", "= 0 for i, data in enumerate(loader, 0): # Only inputs (no labels)", "print_freq = 100 if i % print_freq == print_freq - 1: print(f'Epoch {e" ]
[ "in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_only_instances() or [] for resource in", "connection.get_only_instances() or [] for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time,", "KIND, either express or implied. # See the License for the specific language", "resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource):", "Unless required by applicable law or agreed to in writing, software # distributed", "{message}\".format(**vars(exc))) raise else: instances = connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown sequence for {0}\".format(instances)) return", "from monocyte.handler import Resource, Handler class Instance(Handler): VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"] def fetch_region_names(self):", "import EC2ResponseError from monocyte.handler import Resource, Handler class Instance(Handler): VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"]", "resource): connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError as exc:", "Warning(\"state '{0}' is a valid target state, skipping\".format( resource.wrapped.state)) connection = ec2.connect_to_region(resource.region) if", "{status}\".format(**vars(resource.wrapped)) def delete(self, resource): connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True) except", "this file except in compliance with the License. # You may obtain a", "return instances class Volume(Handler): def fetch_region_names(self): return [region.name for region in ec2.regions()] def", "relentlessly. 
# Copyright 2015 Immobilien Scout GmbH # # Licensed under the Apache", "instance found in {region.name}, \" \\ \"with identifier {id}, instance type is {instance_type},", "and Destroy unwanted AWS Resources relentlessly. # Copyright 2015 Immobilien Scout GmbH #", "resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE '", "from boto.exception import EC2ResponseError from monocyte.handler import Resource, Handler class Instance(Handler): VALID_TARGET_STATES =", "Search and Destroy unwanted AWS Resources relentlessly. # Copyright 2015 Immobilien Scout GmbH", "ANY KIND, either express or implied. # See the License for the specific", "\"with identifier {id}, created {create_time}, \" \\ \"with state {status}\".format(**vars(resource.wrapped)) def delete(self, resource):", "self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ebs", "\\ \"with state {status}\".format(**vars(resource.wrapped)) def delete(self, resource): connection = ec2.connect_to_region(resource.region) if self.dry_run: try:", "'{0}' is a valid target state, skipping\".format( resource.wrapped.state)) connection = ec2.connect_to_region(resource.region) if self.dry_run:", "target state, skipping\".format( resource.wrapped.state)) connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.terminate_instances([resource.wrapped.id], dry_run=True) except", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "\\ \"with identifier {id}, instance type is {instance_type}, created {launch_time}, \" \\ \"dnsname", "resource_wrapper def to_string(self, resource): return \"ec2 instance found in {region.name}, \" \\ \"with", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "{launch_time}, \" \\ \"dnsname is {public_dns_name}, key {key_name}, with state {_state}\".format(**vars(resource.wrapped)) def delete(self,", "resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue", "OF ANY KIND, either express or implied. # See the License for the", "except EC2ResponseError as exc: if exc.status == 412: # Precondition Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc))))", "creation_date=resource.create_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper", "= connection.get_only_instances() or [] for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id,", "if resource.wrapped.state in Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}' is a valid target state, skipping\".format(", "resources = connection.get_all_volumes() or [] for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type,", "if self.dry_run: try: connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError as exc: if exc.status == 412:", "Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper))", "AWS Resources relentlessly. 
# Copyright 2015 Immobilien Scout GmbH # # Licensed under", "# Copyright 2015 Immobilien Scout GmbH # # Licensed under the Apache License,", "= ec2.connect_to_region(region_name) resources = connection.get_only_instances() or [] for resource in resources: resource_wrapper =", "instances class Volume(Handler): def fetch_region_names(self): return [region.name for region in ec2.regions()] def fetch_unwanted_resources(self):", "connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError as exc: if", "# Precondition Failed raise Warning(\"Termination {message}\".format(**vars(exc))) raise else: instances = connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "is a valid target state, skipping\".format( resource.wrapped.state)) connection = ec2.connect_to_region(resource.region) if self.dry_run: try:", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "+ self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ebs volume found in", "if self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError as exc: if exc.status == 412:", "def fetch_region_names(self): return [region.name for region in ec2.regions()] def fetch_unwanted_resources(self): for region_name in", "412: # Precondition Failed raise Warning(\"Termination {message}\".format(**vars(exc))) raise else: instances = connection.terminate_instances([resource.wrapped.id], dry_run=False)", "EC2ResponseError as exc: if exc.status == 412: # Precondition Failed raise Warning(\"Termination {message}\".format(**vars(exc)))", "== 412: # Precondition Failed raise Warning(\"Termination {message}\".format(**vars(exc))) raise else: instances = 
connection.terminate_instances([resource.wrapped.id],", "2015 Immobilien Scout GmbH # # Licensed under the Apache License, Version 2.0", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "{region.name}, \" \\ \"with identifier {id}, instance type is {instance_type}, created {launch_time}, \"", "ec2.connect_to_region(resource.region) if self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError as exc: if exc.status ==", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_all_volumes() or [] for resource", "in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if resource.id in self.ignored_resources:", "# Monocyte - Search and Destroy unwanted AWS Resources relentlessly. 
# Copyright 2015", "GmbH # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "Copyright 2015 Immobilien Scout GmbH # # Licensed under the Apache License, Version", "to_string(self, resource): return \"ebs volume found in {region.name}, \" \\ \"with identifier {id},", "required by applicable law or agreed to in writing, software # distributed under", "\" \\ \"with identifier {id}, instance type is {instance_type}, created {launch_time}, \" \\", "created {launch_time}, \" \\ \"dnsname is {public_dns_name}, key {key_name}, with state {_state}\".format(**vars(resource.wrapped)) def", "import warnings from boto import ec2 from boto.exception import EC2ResponseError from monocyte.handler import", "applicable law or agreed to in writing, software # distributed under the License", "= [\"terminated\", \"shutting-down\"] def fetch_region_names(self): return [region.name for region in ec2.regions()] def fetch_unwanted_resources(self):", "ec2.connect_to_region(resource.region) if self.dry_run: try: connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError as exc: if exc.status ==", "warnings from boto import ec2 from boto.exception import EC2ResponseError from monocyte.handler import Resource,", "or agreed to in writing, software # distributed under the License is distributed", "is {public_dns_name}, key {key_name}, with state {_state}\".format(**vars(resource.wrapped)) def delete(self, resource): if resource.wrapped.state in", "\"with state {status}\".format(**vars(resource.wrapped)) def delete(self, resource): connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.delete_volume(resource.wrapped.id,", "resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name) if resource.id in self.ignored_resources: 
self.logger.info('IGNORE '", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "import ec2 from boto.exception import EC2ResponseError from monocyte.handler import Resource, Handler class Instance(Handler):", "\" \\ \"dnsname is {public_dns_name}, key {key_name}, with state {_state}\".format(**vars(resource.wrapped)) def delete(self, resource):", "resource.wrapped.state in Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}' is a valid target state, skipping\".format( resource.wrapped.state))", "if exc.status == 412: # Precondition Failed raise Warning(\"Termination {message}\".format(**vars(exc))) raise else: instances", "for region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_only_instances() or [] for", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "writing, software # distributed under the License is distributed on an \"AS IS\"", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "Monocyte - Search and Destroy unwanted AWS Resources relentlessly. # Copyright 2015 Immobilien", "License. # You may obtain a copy of the License at # #", "governing permissions and # limitations under the License. import warnings from boto import", "self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ec2 instance", "Handler class Instance(Handler): VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"] def fetch_region_names(self): return [region.name for region", "compliance with the License. 
# You may obtain a copy of the License", "in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return", "resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name) if resource.id in", "[region.name for region in ec2.regions()] def fetch_unwanted_resources(self): for region_name in self.region_names: connection =", "= ec2.connect_to_region(resource.region) if self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError as exc: if exc.status", "for the specific language governing permissions and # limitations under the License. import", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "except EC2ResponseError as exc: if exc.status == 412: # Precondition Failed raise Warning(\"Termination", "License. 
import warnings from boto import ec2 from boto.exception import EC2ResponseError from monocyte.handler", "dry_run=True) except EC2ResponseError as exc: if exc.status == 412: # Precondition Failed warnings.warn(Warning(\"Termination", "ec2 from boto.exception import EC2ResponseError from monocyte.handler import Resource, Handler class Instance(Handler): VALID_TARGET_STATES", "in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name) if resource.id in self.ignored_resources:", "if exc.status == 412: # Precondition Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise else: self.logger.info(\"Initiating deletion", "Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise else: self.logger.info(\"Initiating deletion of EBS volume {0}\".format(resource.wrapped.id)) connection.delete_volume(resource.wrapped.id, dry_run=False)", "continue yield resource_wrapper def to_string(self, resource): return \"ebs volume found in {region.name}, \"", "not use this file except in compliance with the License. 
# You may", "Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper))", "= connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown sequence for {0}\".format(instances)) return instances class Volume(Handler): def", "boto import ec2 from boto.exception import EC2ResponseError from monocyte.handler import Resource, Handler class", "License, Version 2.0 (the \"License\"); # you may not use this file except", "resource): if resource.wrapped.state in Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}' is a valid target state,", "delete(self, resource): connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError as", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "delete(self, resource): if resource.wrapped.state in Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}' is a valid target", "Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}' is a valid target state, skipping\".format( resource.wrapped.state)) connection =", "raise else: instances = connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown sequence for {0}\".format(instances)) return instances", "# you may not use this file except in compliance with the License.", "VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"] def fetch_region_names(self): return [region.name for region in ec2.regions()] def", "return [region.name for region in ec2.regions()] def fetch_unwanted_resources(self): for region_name in self.region_names: connection", "= connection.get_all_volumes() or [] for resource in resources: resource_wrapper = Resource(resource=resource, 
resource_type=self.resource_type, resource_id=resource.id,", "Destroy unwanted AWS Resources relentlessly. # Copyright 2015 Immobilien Scout GmbH # #", "agreed to in writing, software # distributed under the License is distributed on", "the License. import warnings from boto import ec2 from boto.exception import EC2ResponseError from", "try: connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError as exc: if exc.status == 412: # Precondition", "\"ec2 instance found in {region.name}, \" \\ \"with identifier {id}, instance type is", "exc.status == 412: # Precondition Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise else: self.logger.info(\"Initiating deletion of", "(the \"License\"); # you may not use this file except in compliance with", "resources = connection.get_only_instances() or [] for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type,", "Warning(\"Termination {message}\".format(**vars(exc))) raise else: instances = connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown sequence for {0}\".format(instances))", "from boto import ec2 from boto.exception import EC2ResponseError from monocyte.handler import Resource, Handler", "# Unless required by applicable law or agreed to in writing, software #", "import Resource, Handler class Instance(Handler): VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"] def fetch_region_names(self): return [region.name", "by applicable law or agreed to in writing, software # distributed under the", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "boto.exception import EC2ResponseError from monocyte.handler import Resource, Handler class Instance(Handler): VALID_TARGET_STATES = [\"terminated\",", "Precondition Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise else: 
self.logger.info(\"Initiating deletion of EBS volume {0}\".format(resource.wrapped.id)) connection.delete_volume(resource.wrapped.id,", "in {region.name}, \" \\ \"with identifier {id}, instance type is {instance_type}, created {launch_time},", "yield resource_wrapper def to_string(self, resource): return \"ec2 instance found in {region.name}, \" \\", "file except in compliance with the License. # You may obtain a copy", "resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue", "resource_id=resource.id, creation_date=resource.create_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield", "License for the specific language governing permissions and # limitations under the License.", "for {0}\".format(instances)) return instances class Volume(Handler): def fetch_region_names(self): return [region.name for region in", "and # limitations under the License. import warnings from boto import ec2 from", "\"with identifier {id}, instance type is {instance_type}, created {launch_time}, \" \\ \"dnsname is", "to in writing, software # distributed under the License is distributed on an", "self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_only_instances() or [] for resource in resources:", "implied. # See the License for the specific language governing permissions and #", "\"License\"); # you may not use this file except in compliance with the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "return \"ebs volume found in {region.name}, \" \\ \"with identifier {id}, created {create_time},", "language governing permissions and # limitations under the License. import warnings from boto", "unwanted AWS Resources relentlessly. 
# Copyright 2015 Immobilien Scout GmbH # # Licensed", "or [] for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name)", "' + self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ec2 instance found", "instance type is {instance_type}, created {launch_time}, \" \\ \"dnsname is {public_dns_name}, key {key_name},", "raise Warning(\"state '{0}' is a valid target state, skipping\".format( resource.wrapped.state)) connection = ec2.connect_to_region(resource.region)", "or implied. # See the License for the specific language governing permissions and", "dry_run=True) except EC2ResponseError as exc: if exc.status == 412: # Precondition Failed raise", "exc.status == 412: # Precondition Failed raise Warning(\"Termination {message}\".format(**vars(exc))) raise else: instances =", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ebs volume", "{key_name}, with state {_state}\".format(**vars(resource.wrapped)) def delete(self, resource): if resource.wrapped.state in Instance.VALID_TARGET_STATES: raise Warning(\"state", "limitations under the License. import warnings from boto import ec2 from boto.exception import", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "\"ebs volume found in {region.name}, \" \\ \"with identifier {id}, created {create_time}, \"", "self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError as exc: if exc.status == 412: #", "= ec2.connect_to_region(region_name) resources = connection.get_all_volumes() or [] for resource in resources: resource_wrapper =", "exc: if exc.status == 412: # Precondition Failed raise Warning(\"Termination {message}\".format(**vars(exc))) raise else:", "dry_run=False) self.logger.info(\"Initiating shutdown sequence for {0}\".format(instances)) return instances class Volume(Handler): def fetch_region_names(self): return", "def to_string(self, resource): return \"ebs volume found in {region.name}, \" \\ \"with identifier", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "ec2.connect_to_region(region_name) resources = connection.get_only_instances() or [] for resource in resources: resource_wrapper = Resource(resource=resource,", "in writing, software # distributed under the License is distributed on an \"AS", "state, skipping\".format( resource.wrapped.state)) connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError", "identifier {id}, instance type is {instance_type}, created {launch_time}, \" \\ \"dnsname is {public_dns_name},", "{0}\".format(instances)) return instances class Volume(Handler): def fetch_region_names(self): return [region.name for region in ec2.regions()]", "connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown sequence for {0}\".format(instances)) return instances class Volume(Handler): def fetch_region_names(self):", "= ec2.connect_to_region(resource.region) if self.dry_run: try: 
connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError as exc: if exc.status", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "for region in ec2.regions()] def fetch_unwanted_resources(self): for region_name in self.region_names: connection = ec2.connect_to_region(region_name)", "connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError as exc: if exc.status == 412: # Precondition Failed", "' + self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ebs volume found", "412: # Precondition Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise else: self.logger.info(\"Initiating deletion of EBS volume", "type is {instance_type}, created {launch_time}, \" \\ \"dnsname is {public_dns_name}, key {key_name}, with", "identifier {id}, created {create_time}, \" \\ \"with state {status}\".format(**vars(resource.wrapped)) def delete(self, resource): connection", "fetch_unwanted_resources(self): for region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_all_volumes() or []", "\\ \"dnsname is {public_dns_name}, key {key_name}, with state {_state}\".format(**vars(resource.wrapped)) def delete(self, resource): if", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. 
#", "ec2.regions()] def fetch_unwanted_resources(self): for region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_all_volumes()", "resource): return \"ec2 instance found in {region.name}, \" \\ \"with identifier {id}, instance", "exc: if exc.status == 412: # Precondition Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise else: self.logger.info(\"Initiating", "- Search and Destroy unwanted AWS Resources relentlessly. # Copyright 2015 Immobilien Scout", "class Volume(Handler): def fetch_region_names(self): return [region.name for region in ec2.regions()] def fetch_unwanted_resources(self): for", "use this file except in compliance with the License. # You may obtain", "= Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' +", "Precondition Failed raise Warning(\"Termination {message}\".format(**vars(exc))) raise else: instances = connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown", "volume found in {region.name}, \" \\ \"with identifier {id}, created {create_time}, \" \\", "the specific language governing permissions and # limitations under the License. import warnings", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "key {key_name}, with state {_state}\".format(**vars(resource.wrapped)) def delete(self, resource): if resource.wrapped.state in Instance.VALID_TARGET_STATES: raise", "{region.name}, \" \\ \"with identifier {id}, created {create_time}, \" \\ \"with state {status}\".format(**vars(resource.wrapped))", "2.0 (the \"License\"); # you may not use this file except in compliance", "# limitations under the License. 
import warnings from boto import ec2 from boto.exception", "[] for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if", "{create_time}, \" \\ \"with state {status}\".format(**vars(resource.wrapped)) def delete(self, resource): connection = ec2.connect_to_region(resource.region) if", "region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_only_instances() or [] for resource", "\" \\ \"with state {status}\".format(**vars(resource.wrapped)) def delete(self, resource): connection = ec2.connect_to_region(resource.region) if self.dry_run:", "shutdown sequence for {0}\".format(instances)) return instances class Volume(Handler): def fetch_region_names(self): return [region.name for", "ec2.connect_to_region(region_name) resources = connection.get_all_volumes() or [] for resource in resources: resource_wrapper = Resource(resource=resource,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "continue yield resource_wrapper def to_string(self, resource): return \"ec2 instance found in {region.name}, \"", "specific language governing permissions and # limitations under the License. 
import warnings from", "instances = connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown sequence for {0}\".format(instances)) return instances class Volume(Handler):", "self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ec2", "# # Unless required by applicable law or agreed to in writing, software", "Resource, Handler class Instance(Handler): VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"] def fetch_region_names(self): return [region.name for", "for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name) if resource.id", "express or implied. # See the License for the specific language governing permissions", "resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE", "permissions and # limitations under the License. import warnings from boto import ec2", "[] for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name) if", "{_state}\".format(**vars(resource.wrapped)) def delete(self, resource): if resource.wrapped.state in Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}' is a", "Instance(Handler): VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"] def fetch_region_names(self): return [region.name for region in ec2.regions()]", "either express or implied. 
# See the License for the specific language governing", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "class Instance(Handler): VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"] def fetch_region_names(self): return [region.name for region in", "region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper def", "is {instance_type}, created {launch_time}, \" \\ \"dnsname is {public_dns_name}, key {key_name}, with state", "raise Warning(\"Termination {message}\".format(**vars(exc))) raise else: instances = connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown sequence for", "resource): return \"ebs volume found in {region.name}, \" \\ \"with identifier {id}, created", "the License. # You may obtain a copy of the License at #", "state {_state}\".format(**vars(resource.wrapped)) def delete(self, resource): if resource.wrapped.state in Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}' is", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "EC2ResponseError as exc: if exc.status == 412: # Precondition Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise", "{id}, instance type is {instance_type}, created {launch_time}, \" \\ \"dnsname is {public_dns_name}, key", "region in ec2.regions()] def fetch_unwanted_resources(self): for region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources", "EC2ResponseError from monocyte.handler import Resource, Handler class Instance(Handler): VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"] def", "in ec2.regions()] def fetch_unwanted_resources(self): for region_name in self.region_names: 
connection = ec2.connect_to_region(region_name) resources =", "fetch_region_names(self): return [region.name for region in ec2.regions()] def fetch_unwanted_resources(self): for region_name in self.region_names:", "else: instances = connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown sequence for {0}\".format(instances)) return instances class", "return \"ec2 instance found in {region.name}, \" \\ \"with identifier {id}, instance type", "\" \\ \"with identifier {id}, created {create_time}, \" \\ \"with state {status}\".format(**vars(resource.wrapped)) def", "under the License. import warnings from boto import ec2 from boto.exception import EC2ResponseError", "self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ebs volume found in {region.name},", "with the License. # You may obtain a copy of the License at", "monocyte.handler import Resource, Handler class Instance(Handler): VALID_TARGET_STATES = [\"terminated\", \"shutting-down\"] def fetch_region_names(self): return", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "[\"terminated\", \"shutting-down\"] def fetch_region_names(self): return [region.name for region in ec2.regions()] def fetch_unwanted_resources(self): for", "connection = ec2.connect_to_region(region_name) resources = connection.get_only_instances() or [] for resource in resources: resource_wrapper", "sequence for {0}\".format(instances)) return instances class Volume(Handler): def fetch_region_names(self): return [region.name for region", "created {create_time}, \" \\ \"with state {status}\".format(**vars(resource.wrapped)) def delete(self, resource): connection = ec2.connect_to_region(resource.region)", "\"dnsname is {public_dns_name}, key {key_name}, with state {_state}\".format(**vars(resource.wrapped)) def delete(self, resource): if resource.wrapped.state", "# Precondition Failed 
warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise else: self.logger.info(\"Initiating deletion of EBS volume {0}\".format(resource.wrapped.id))", "== 412: # Precondition Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise else: self.logger.info(\"Initiating deletion of EBS", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "fetch_unwanted_resources(self): for region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_only_instances() or []", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield", "in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_all_volumes() or [] for resource in", "Resources relentlessly. # Copyright 2015 Immobilien Scout GmbH # # Licensed under the", "Volume(Handler): def fetch_region_names(self): return [region.name for region in ec2.regions()] def fetch_unwanted_resources(self): for region_name", "Scout GmbH # # Licensed under the Apache License, Version 2.0 (the \"License\");", "ec2.regions()] def fetch_unwanted_resources(self): for region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_only_instances()", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE", "{instance_type}, created {launch_time}, \" \\ \"dnsname is {public_dns_name}, key {key_name}, with state {_state}\".format(**vars(resource.wrapped))", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "for region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_all_volumes() or [] for", "def to_string(self, resource): return \"ec2 instance found in {region.name}, \" \\ \"with identifier", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "\"shutting-down\"] def fetch_region_names(self): return [region.name for region in ec2.regions()] def fetch_unwanted_resources(self): for region_name", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "skipping\".format( resource.wrapped.state)) connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError as", "valid target state, skipping\".format( resource.wrapped.state)) connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.terminate_instances([resource.wrapped.id], dry_run=True)", "self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_all_volumes() or [] for resource in resources:", "self.logger.info(\"Initiating shutdown sequence for {0}\".format(instances)) return instances class Volume(Handler): def fetch_region_names(self): return 
[region.name", "resource_wrapper def to_string(self, resource): return \"ebs volume found in {region.name}, \" \\ \"with", "with state {_state}\".format(**vars(resource.wrapped)) def delete(self, resource): if resource.wrapped.state in Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}'", "Immobilien Scout GmbH # # Licensed under the Apache License, Version 2.0 (the", "to_string(self, resource): return \"ec2 instance found in {region.name}, \" \\ \"with identifier {id},", "resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if resource.id in", "def delete(self, resource): connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError", "{public_dns_name}, key {key_name}, with state {_state}\".format(**vars(resource.wrapped)) def delete(self, resource): if resource.wrapped.state in Instance.VALID_TARGET_STATES:", "Failed raise Warning(\"Termination {message}\".format(**vars(exc))) raise else: instances = connection.terminate_instances([resource.wrapped.id], dry_run=False) self.logger.info(\"Initiating shutdown sequence", "self.dry_run: try: connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError as exc: if exc.status == 412: #", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. 
# You may obtain a copy of", "= Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' +", "+ self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ec2 instance found in", "def delete(self, resource): if resource.wrapped.state in Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}' is a valid", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "yield resource_wrapper def to_string(self, resource): return \"ebs volume found in {region.name}, \" \\", "def fetch_unwanted_resources(self): for region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_only_instances() or", "def fetch_unwanted_resources(self): for region_name in self.region_names: connection = ec2.connect_to_region(region_name) resources = connection.get_all_volumes() or", "for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.launch_time, region=region_name) if resource.id", "found in {region.name}, \" \\ \"with identifier {id}, instance type is {instance_type}, created", "state {status}\".format(**vars(resource.wrapped)) def delete(self, resource): connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True)", "connection.get_all_volumes() or [] for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time,", "as exc: if exc.status == 412: # Precondition Failed raise 
Warning(\"Termination {message}\".format(**vars(exc))) raise", "connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError as exc: if exc.status == 412: # Precondition Failed", "\\ \"with identifier {id}, created {create_time}, \" \\ \"with state {status}\".format(**vars(resource.wrapped)) def delete(self,", "found in {region.name}, \" \\ \"with identifier {id}, created {create_time}, \" \\ \"with", "as exc: if exc.status == 412: # Precondition Failed warnings.warn(Warning(\"Termination {message}\".format(**vars(exc)))) raise else:", "or [] for resource in resources: resource_wrapper = Resource(resource=resource, resource_type=self.resource_type, resource_id=resource.id, creation_date=resource.create_time, region=region_name)", "self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self, resource): return \"ec2 instance found in {region.name},", "try: connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError as exc: if exc.status == 412: # Precondition", "a valid target state, skipping\".format( resource.wrapped.state)) connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.terminate_instances([resource.wrapped.id],", "in Instance.VALID_TARGET_STATES: raise Warning(\"state '{0}' is a valid target state, skipping\".format( resource.wrapped.state)) connection", "if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper def to_string(self,", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "in {region.name}, \" \\ \"with identifier {id}, created {create_time}, \" \\ \"with state", "connection = ec2.connect_to_region(resource.region) if self.dry_run: try: connection.delete_volume(resource.wrapped.id, dry_run=True) except EC2ResponseError as exc: if", "resource.wrapped.state)) connection = ec2.connect_to_region(resource.region) if self.dry_run: 
try: connection.terminate_instances([resource.wrapped.id], dry_run=True) except EC2ResponseError as exc:", "{id}, created {create_time}, \" \\ \"with state {status}\".format(**vars(resource.wrapped)) def delete(self, resource): connection =", "connection = ec2.connect_to_region(region_name) resources = connection.get_all_volumes() or [] for resource in resources: resource_wrapper", "creation_date=resource.launch_time, region=region_name) if resource.id in self.ignored_resources: self.logger.info('IGNORE ' + self.to_string(resource_wrapper)) continue yield resource_wrapper" ]
[ "from vendas.models import Venda class VendaAdminForm(forms.ModelForm): class Meta: model = Venda fields =", "<gh_stars>0 from django import forms from vendas.models import Venda class VendaAdminForm(forms.ModelForm): class Meta:", "from django import forms from vendas.models import Venda class VendaAdminForm(forms.ModelForm): class Meta: model", "forms from vendas.models import Venda class VendaAdminForm(forms.ModelForm): class Meta: model = Venda fields", "vendas.models import Venda class VendaAdminForm(forms.ModelForm): class Meta: model = Venda fields = ('nome_client','telefone','cidade','email','produto','deu',)", "import forms from vendas.models import Venda class VendaAdminForm(forms.ModelForm): class Meta: model = Venda", "django import forms from vendas.models import Venda class VendaAdminForm(forms.ModelForm): class Meta: model =" ]
[ "def run(argv): if not 2 <= len(argv) <= 3: print('error: bad argument count',", "print('note: base set to {}'.format(base), file=sys.stderr) if argv[1] == 'encode': run_encode(base) elif argv[1]", "== 'decode': run_decode(base) else: print('error: unknown command \"{}\"'.format(argv[1]), file=sys.stderr) usage(argv[0]) if __name__ ==", "a valid base (must be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if len(argv) == 2: base", "b'': break def run_decode(base): raise NotImplementedError() def usage(progname): print('USAGE: {} {{encode | decode}}", "1 and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except IndexError: pass except ValueError: print('error: \"{}\" is", "ValueError: print('error: \"{}\" is not a valid base (must be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0])", "base is None: base = 54 else: print('note: base set to {}'.format(base), file=sys.stderr)", "def run_decode(base): raise NotImplementedError() def usage(progname): print('USAGE: {} {{encode | decode}} [<BASENUM>]'.format(progname), file=sys.stderr)", "to {}'.format(base), file=sys.stderr) if argv[1] == 'encode': run_encode(base) elif argv[1] == 'decode': run_decode(base)", "a valid base (must be integer between 1 and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except", "is not a valid base (must be integer between 1 and 1000)'.format(argv[2]), file=sys.stderr)", "in range(base) b = sys.stdin.buffer.read(1) if b == b'': break def run_decode(base): raise", "not 1 < base < 1000: print('error: \"{}\" is not a valid base", "None: base = os.environ.get('DECKCODE_BASE') if base is None: base = 54 else: print('note:", "bad argument count', file=sys.stderr) usage(argv[0]) base = None try: base = int(argv[2]) if", "'encode': run_encode(base) elif argv[1] == 'decode': run_decode(base) else: print('error: unknown command \"{}\"'.format(argv[1]), file=sys.stderr)", "| decode}} [<BASENUM>]'.format(progname), file=sys.stderr) exit(1) def run(argv): if not 
2 <= len(argv) <=", "integer between 1 and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except IndexError: pass except ValueError: print('error:", "1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except IndexError: pass except ValueError: print('error: \"{}\" is not a", "elif argv[1] == 'decode': run_decode(base) else: print('error: unknown command \"{}\"'.format(argv[1]), file=sys.stderr) usage(argv[0]) if", "int(argv[2]) if not 1 < base < 1000: print('error: \"{}\" is not a", "valid base (must be integer between 1 and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except IndexError:", "be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if len(argv) == 2: base = os.environ.get('DECKCODE_BASE') if base", "file=sys.stderr) usage(argv[0]) if len(argv) == 2: base = os.environ.get('DECKCODE_BASE') if base is None:", "count', file=sys.stderr) usage(argv[0]) base = None try: base = int(argv[2]) if not 1", "< base < 1000: print('error: \"{}\" is not a valid base (must be", "2 <= len(argv) <= 3: print('error: bad argument count', file=sys.stderr) usage(argv[0]) base =", "except ValueError: print('error: \"{}\" is not a valid base (must be integer)'.format(argv[2]), file=sys.stderr)", "len(argv) == 2: base = os.environ.get('DECKCODE_BASE') if base is None: base = os.environ.get('DECKCODE_BASE')", "2: base = os.environ.get('DECKCODE_BASE') if base is None: base = os.environ.get('DECKCODE_BASE') if base", "is None: base = 54 else: print('note: base set to {}'.format(base), file=sys.stderr) if", "sys.stdin.buffer.read(1) if b == b'': break def run_decode(base): raise NotImplementedError() def usage(progname): print('USAGE:", "exit(1) def run(argv): if not 2 <= len(argv) <= 3: print('error: bad argument", "{} {{encode | decode}} [<BASENUM>]'.format(progname), file=sys.stderr) exit(1) def run(argv): if not 2 <=", "NotImplementedError() def usage(progname): print('USAGE: {} {{encode | decode}} [<BASENUM>]'.format(progname), file=sys.stderr) 
exit(1) def run(argv):", "base = int(argv[2]) if not 1 < base < 1000: print('error: \"{}\" is", "None try: base = int(argv[2]) if not 1 < base < 1000: print('error:", "base (must be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if len(argv) == 2: base = os.environ.get('DECKCODE_BASE')", "b == b'': break def run_decode(base): raise NotImplementedError() def usage(progname): print('USAGE: {} {{encode", "be integer between 1 and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except IndexError: pass except ValueError:", "<= len(argv) <= 3: print('error: bad argument count', file=sys.stderr) usage(argv[0]) base = None", "pass except ValueError: print('error: \"{}\" is not a valid base (must be integer)'.format(argv[2]),", "not a valid base (must be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if len(argv) == 2:", "i in range(base) b = sys.stdin.buffer.read(1) if b == b'': break def run_decode(base):", "IndexError: pass except ValueError: print('error: \"{}\" is not a valid base (must be", "usage(argv[0]) base = None try: base = int(argv[2]) if not 1 < base", "b = sys.stdin.buffer.read(1) if b == b'': break def run_decode(base): raise NotImplementedError() def", "\"{}\" is not a valid base (must be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if len(argv)", "1000: print('error: \"{}\" is not a valid base (must be integer between 1", "{{encode | decode}} [<BASENUM>]'.format(progname), file=sys.stderr) exit(1) def run(argv): if not 2 <= len(argv)", "== b'': break def run_decode(base): raise NotImplementedError() def usage(progname): print('USAGE: {} {{encode |", "< 1000: print('error: \"{}\" is not a valid base (must be integer between", "os import sys def run_encode(base): x = 0 for i in range(base) b", "usage(progname): print('USAGE: {} {{encode | decode}} [<BASENUM>]'.format(progname), file=sys.stderr) exit(1) def run(argv): if not", "1 < base < 1000: print('error: \"{}\" is not a valid base (must", "run_encode(base): x 
= 0 for i in range(base) b = sys.stdin.buffer.read(1) if b", "base < 1000: print('error: \"{}\" is not a valid base (must be integer", "(must be integer between 1 and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except IndexError: pass except", "range(base) b = sys.stdin.buffer.read(1) if b == b'': break def run_decode(base): raise NotImplementedError()", "base = os.environ.get('DECKCODE_BASE') if base is None: base = 54 else: print('note: base", "run(argv): if not 2 <= len(argv) <= 3: print('error: bad argument count', file=sys.stderr)", "\"{}\" is not a valid base (must be integer between 1 and 1000)'.format(argv[2]),", "run_decode(base) else: print('error: unknown command \"{}\"'.format(argv[1]), file=sys.stderr) usage(argv[0]) if __name__ == '__main__': run(sys.argv)", "os.environ.get('DECKCODE_BASE') if base is None: base = os.environ.get('DECKCODE_BASE') if base is None: base", "<= 3: print('error: bad argument count', file=sys.stderr) usage(argv[0]) base = None try: base", "[<BASENUM>]'.format(progname), file=sys.stderr) exit(1) def run(argv): if not 2 <= len(argv) <= 3: print('error:", "54 else: print('note: base set to {}'.format(base), file=sys.stderr) if argv[1] == 'encode': run_encode(base)", "base = os.environ.get('DECKCODE_BASE') if base is None: base = os.environ.get('DECKCODE_BASE') if base is", "integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if len(argv) == 2: base = os.environ.get('DECKCODE_BASE') if base is", "{}'.format(base), file=sys.stderr) if argv[1] == 'encode': run_encode(base) elif argv[1] == 'decode': run_decode(base) else:", "argument count', file=sys.stderr) usage(argv[0]) base = None try: base = int(argv[2]) if not", "base = 54 else: print('note: base set to {}'.format(base), file=sys.stderr) if argv[1] ==", "if len(argv) == 2: base = os.environ.get('DECKCODE_BASE') if base is None: base =", "if b == b'': break def run_decode(base): raise NotImplementedError() def usage(progname): print('USAGE: {}", "if base is 
None: base = os.environ.get('DECKCODE_BASE') if base is None: base =", "len(argv) <= 3: print('error: bad argument count', file=sys.stderr) usage(argv[0]) base = None try:", "print('error: \"{}\" is not a valid base (must be integer between 1 and", "try: base = int(argv[2]) if not 1 < base < 1000: print('error: \"{}\"", "def usage(progname): print('USAGE: {} {{encode | decode}} [<BASENUM>]'.format(progname), file=sys.stderr) exit(1) def run(argv): if", "raise NotImplementedError() def usage(progname): print('USAGE: {} {{encode | decode}} [<BASENUM>]'.format(progname), file=sys.stderr) exit(1) def", "3: print('error: bad argument count', file=sys.stderr) usage(argv[0]) base = None try: base =", "file=sys.stderr) if argv[1] == 'encode': run_encode(base) elif argv[1] == 'decode': run_decode(base) else: print('error:", "= int(argv[2]) if not 1 < base < 1000: print('error: \"{}\" is not", "if base is None: base = 54 else: print('note: base set to {}'.format(base),", "run_encode(base) elif argv[1] == 'decode': run_decode(base) else: print('error: unknown command \"{}\"'.format(argv[1]), file=sys.stderr) usage(argv[0])", "and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except IndexError: pass except ValueError: print('error: \"{}\" is not", "= os.environ.get('DECKCODE_BASE') if base is None: base = os.environ.get('DECKCODE_BASE') if base is None:", "file=sys.stderr) usage(argv[0]) base = None try: base = int(argv[2]) if not 1 <", "== 2: base = os.environ.get('DECKCODE_BASE') if base is None: base = os.environ.get('DECKCODE_BASE') if", "usage(argv[0]) except IndexError: pass except ValueError: print('error: \"{}\" is not a valid base", "= 54 else: print('note: base set to {}'.format(base), file=sys.stderr) if argv[1] == 'encode':", "is not a valid base (must be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if len(argv) ==", "decode}} [<BASENUM>]'.format(progname), file=sys.stderr) exit(1) def run(argv): if not 2 <= len(argv) <= 3:", "base set to 
{}'.format(base), file=sys.stderr) if argv[1] == 'encode': run_encode(base) elif argv[1] ==", "def run_encode(base): x = 0 for i in range(base) b = sys.stdin.buffer.read(1) if", "valid base (must be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if len(argv) == 2: base =", "'decode': run_decode(base) else: print('error: unknown command \"{}\"'.format(argv[1]), file=sys.stderr) usage(argv[0]) if __name__ == '__main__':", "not a valid base (must be integer between 1 and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0])", "python3 import common import os import sys def run_encode(base): x = 0 for", "run_decode(base): raise NotImplementedError() def usage(progname): print('USAGE: {} {{encode | decode}} [<BASENUM>]'.format(progname), file=sys.stderr) exit(1)", "(must be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if len(argv) == 2: base = os.environ.get('DECKCODE_BASE') if", "import common import os import sys def run_encode(base): x = 0 for i", "sys def run_encode(base): x = 0 for i in range(base) b = sys.stdin.buffer.read(1)", "file=sys.stderr) exit(1) def run(argv): if not 2 <= len(argv) <= 3: print('error: bad", "argv[1] == 'encode': run_encode(base) elif argv[1] == 'decode': run_decode(base) else: print('error: unknown command", "except IndexError: pass except ValueError: print('error: \"{}\" is not a valid base (must", "x = 0 for i in range(base) b = sys.stdin.buffer.read(1) if b ==", "is None: base = os.environ.get('DECKCODE_BASE') if base is None: base = 54 else:", "common import os import sys def run_encode(base): x = 0 for i in", "import os import sys def run_encode(base): x = 0 for i in range(base)", "usage(argv[0]) if len(argv) == 2: base = os.environ.get('DECKCODE_BASE') if base is None: base", "argv[1] == 'decode': run_decode(base) else: print('error: unknown command \"{}\"'.format(argv[1]), file=sys.stderr) usage(argv[0]) if __name__", "os.environ.get('DECKCODE_BASE') if base is None: base = 54 else: print('note: base set 
to", "for i in range(base) b = sys.stdin.buffer.read(1) if b == b'': break def", "if not 2 <= len(argv) <= 3: print('error: bad argument count', file=sys.stderr) usage(argv[0])", "= 0 for i in range(base) b = sys.stdin.buffer.read(1) if b == b'':", "if not 1 < base < 1000: print('error: \"{}\" is not a valid", "print('error: bad argument count', file=sys.stderr) usage(argv[0]) base = None try: base = int(argv[2])", "base is None: base = os.environ.get('DECKCODE_BASE') if base is None: base = 54", "== 'encode': run_encode(base) elif argv[1] == 'decode': run_decode(base) else: print('error: unknown command \"{}\"'.format(argv[1]),", "base (must be integer between 1 and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except IndexError: pass", "= None try: base = int(argv[2]) if not 1 < base < 1000:", "not 2 <= len(argv) <= 3: print('error: bad argument count', file=sys.stderr) usage(argv[0]) base", "base = None try: base = int(argv[2]) if not 1 < base <", "= sys.stdin.buffer.read(1) if b == b'': break def run_decode(base): raise NotImplementedError() def usage(progname):", "file=sys.stderr) usage(argv[0]) except IndexError: pass except ValueError: print('error: \"{}\" is not a valid", "= os.environ.get('DECKCODE_BASE') if base is None: base = 54 else: print('note: base set", "if argv[1] == 'encode': run_encode(base) elif argv[1] == 'decode': run_decode(base) else: print('error: unknown", "0 for i in range(base) b = sys.stdin.buffer.read(1) if b == b'': break", "set to {}'.format(base), file=sys.stderr) if argv[1] == 'encode': run_encode(base) elif argv[1] == 'decode':", "import sys def run_encode(base): x = 0 for i in range(base) b =", "print('error: \"{}\" is not a valid base (must be integer)'.format(argv[2]), file=sys.stderr) usage(argv[0]) if", "break def run_decode(base): raise NotImplementedError() def usage(progname): print('USAGE: {} {{encode | decode}} [<BASENUM>]'.format(progname),", "None: base = 54 else: print('note: base set to {}'.format(base), 
file=sys.stderr) if argv[1]", "#!/usr/bin/env python3 import common import os import sys def run_encode(base): x = 0", "between 1 and 1000)'.format(argv[2]), file=sys.stderr) usage(argv[0]) except IndexError: pass except ValueError: print('error: \"{}\"", "else: print('note: base set to {}'.format(base), file=sys.stderr) if argv[1] == 'encode': run_encode(base) elif", "print('USAGE: {} {{encode | decode}} [<BASENUM>]'.format(progname), file=sys.stderr) exit(1) def run(argv): if not 2" ]
[ "letters = string.ascii_lowercase premail = ''.join(random.choice(letters) for i in range(64)) email = premail", "+ quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type': 'application/x-www-form-urlencoded'} synthr = requests.post(url = synthrl,", "\"@gmail.com\" noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email} noncer = requests.post(url = noncerl, data", "= \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email} noncer = requests.post(url = noncerl, data = noncedata)", "synthresult = minussuf + \".mp3\" urllib.request.urlretrieve(synthresult, str(Path.home()) + \"/.dominae/out/tts/\" + email[:8] + \".mp3\")", "headers = headers) minuspre = synthr.text[synthr.text.find('http://'):] minussuf = minuspre.split(\".mp3\", 1)[0] synthresult = minussuf", "+ \"@gmail.com\" noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email} noncer = requests.post(url = noncerl,", "''.join(random.choice(letters) for i in range(64)) email = premail + \"@gmail.com\" noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\"", "requests.post(url = noncerl, data = noncedata) nonce = noncer.text[10:50] synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata", "random import json import sys def acapyla(quote, voicename=\"willfromafar\"): try: voiceid = \"enu_\" +", "json import sys def acapyla(quote, voicename=\"willfromafar\"): try: voiceid = \"enu_\" + voicename +", "noncedata) nonce = noncer.text[10:50] synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\" + voiceid +", "import json import sys def acapyla(quote, voicename=\"willfromafar\"): try: voiceid = \"enu_\" + voicename", "+ \"%22%2C%22user%22%3A%22\" + email + \"%22%7D&req_text=\" + quote + 
\"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type':", "noncer = requests.post(url = noncerl, data = noncedata) nonce = noncer.text[10:50] synthrl =", "import requests import string import urllib import random import json import sys def", "IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase premail = ''.join(random.choice(letters) for i in", "\"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase premail = ''.join(random.choice(letters) for i in range(64)) email =", "= \"enu_\" + voicename + \"_22k_ns.bvcu\" except IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters =", "pathlib import Path import requests import string import urllib import random import json", "= \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\" + voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce + \"%22%2C%22user%22%3A%22\"", "acapyla(quote, voicename=\"willfromafar\"): try: voiceid = \"enu_\" + voicename + \"_22k_ns.bvcu\" except IndexError: voiceid", "\"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce + \"%22%2C%22user%22%3A%22\" + email + \"%22%7D&req_text=\" + quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\"", "noncerl, data = noncedata) nonce = noncer.text[10:50] synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\"", "synthdata = \"req_voice=\" + voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce + \"%22%2C%22user%22%3A%22\" + email", "email = premail + \"@gmail.com\" noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email} noncer =", "for i in range(64)) email = premail + \"@gmail.com\" noncerl = 
\"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata", "1)[0] synthresult = minussuf + \".mp3\" urllib.request.urlretrieve(synthresult, str(Path.home()) + \"/.dominae/out/tts/\" + email[:8] +", "synthrl, data = synthdata, headers = headers) minuspre = synthr.text[synthr.text.find('http://'):] minussuf = minuspre.split(\".mp3\",", "i in range(64)) email = premail + \"@gmail.com\" noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata =", "voicename + \"_22k_ns.bvcu\" except IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase premail =", "= synthrl, data = synthdata, headers = headers) minuspre = synthr.text[synthr.text.find('http://'):] minussuf =", "noncedata = {'googleid':email} noncer = requests.post(url = noncerl, data = noncedata) nonce =", "voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase premail = ''.join(random.choice(letters) for i in range(64))", "except IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase premail = ''.join(random.choice(letters) for i", "\"req_voice=\" + voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce + \"%22%2C%22user%22%3A%22\" + email + \"%22%7D&req_text=\"", "range(64)) email = premail + \"@gmail.com\" noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email} noncer", "\"_22k_ns.bvcu\" except IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase premail = ''.join(random.choice(letters) for", "in range(64)) email = premail + \"@gmail.com\" noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email}", "string import urllib import random import json import sys def acapyla(quote, voicename=\"willfromafar\"): try:", "= noncerl, data = noncedata) nonce = noncer.text[10:50] synthrl = 
\"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata =", "quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type': 'application/x-www-form-urlencoded'} synthr = requests.post(url = synthrl, data", "+ email + \"%22%7D&req_text=\" + quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type': 'application/x-www-form-urlencoded'} synthr", "def acapyla(quote, voicename=\"willfromafar\"): try: voiceid = \"enu_\" + voicename + \"_22k_ns.bvcu\" except IndexError:", "from pathlib import Path import requests import string import urllib import random import", "+ \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type': 'application/x-www-form-urlencoded'} synthr = requests.post(url = synthrl, data =", "\"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type': 'application/x-www-form-urlencoded'} synthr = requests.post(url = synthrl, data = synthdata,", "+ \"%22%7D&req_text=\" + quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type': 'application/x-www-form-urlencoded'} synthr = requests.post(url", "= string.ascii_lowercase premail = ''.join(random.choice(letters) for i in range(64)) email = premail +", "nonce + \"%22%2C%22user%22%3A%22\" + email + \"%22%7D&req_text=\" + quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers =", "requests.post(url = synthrl, data = synthdata, headers = headers) minuspre = synthr.text[synthr.text.find('http://'):] minussuf", "string.ascii_lowercase premail = ''.join(random.choice(letters) for i in range(64)) email = premail + \"@gmail.com\"", "data = synthdata, headers = headers) minuspre = synthr.text[synthr.text.find('http://'):] minussuf = minuspre.split(\".mp3\", 1)[0]", "minuspre = synthr.text[synthr.text.find('http://'):] 
minussuf = minuspre.split(\".mp3\", 1)[0] synthresult = minussuf + \".mp3\" urllib.request.urlretrieve(synthresult,", "= {'googleid':email} noncer = requests.post(url = noncerl, data = noncedata) nonce = noncer.text[10:50]", "noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email} noncer = requests.post(url = noncerl, data =", "import urllib import random import json import sys def acapyla(quote, voicename=\"willfromafar\"): try: voiceid", "= synthr.text[synthr.text.find('http://'):] minussuf = minuspre.split(\".mp3\", 1)[0] synthresult = minussuf + \".mp3\" urllib.request.urlretrieve(synthresult, str(Path.home())", "minussuf + \".mp3\" urllib.request.urlretrieve(synthresult, str(Path.home()) + \"/.dominae/out/tts/\" + email[:8] + \".mp3\") return email[:8]", "try: voiceid = \"enu_\" + voicename + \"_22k_ns.bvcu\" except IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\"", "premail = ''.join(random.choice(letters) for i in range(64)) email = premail + \"@gmail.com\" noncerl", "sys def acapyla(quote, voicename=\"willfromafar\"): try: voiceid = \"enu_\" + voicename + \"_22k_ns.bvcu\" except", "headers = {'content-type': 'application/x-www-form-urlencoded'} synthr = requests.post(url = synthrl, data = synthdata, headers", "import random import json import sys def acapyla(quote, voicename=\"willfromafar\"): try: voiceid = \"enu_\"", "import Path import requests import string import urllib import random import json import", "synthr = requests.post(url = synthrl, data = synthdata, headers = headers) minuspre =", "= \"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase premail = ''.join(random.choice(letters) for i in range(64)) email", "synthr.text[synthr.text.find('http://'):] minussuf = minuspre.split(\".mp3\", 1)[0] synthresult = minussuf + \".mp3\" urllib.request.urlretrieve(synthresult, str(Path.home()) +", "urllib import random import json import sys def acapyla(quote, voicename=\"willfromafar\"): try: 
voiceid =", "voicename=\"willfromafar\"): try: voiceid = \"enu_\" + voicename + \"_22k_ns.bvcu\" except IndexError: voiceid =", "email + \"%22%7D&req_text=\" + quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type': 'application/x-www-form-urlencoded'} synthr =", "import sys def acapyla(quote, voicename=\"willfromafar\"): try: voiceid = \"enu_\" + voicename + \"_22k_ns.bvcu\"", "= \"req_voice=\" + voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce + \"%22%2C%22user%22%3A%22\" + email +", "\"%22%7D&req_text=\" + quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type': 'application/x-www-form-urlencoded'} synthr = requests.post(url =", "= headers) minuspre = synthr.text[synthr.text.find('http://'):] minussuf = minuspre.split(\".mp3\", 1)[0] synthresult = minussuf +", "= synthdata, headers = headers) minuspre = synthr.text[synthr.text.find('http://'):] minussuf = minuspre.split(\".mp3\", 1)[0] synthresult", "= minussuf + \".mp3\" urllib.request.urlretrieve(synthresult, str(Path.home()) + \"/.dominae/out/tts/\" + email[:8] + \".mp3\") return", "{'googleid':email} noncer = requests.post(url = noncerl, data = noncedata) nonce = noncer.text[10:50] synthrl", "voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce + \"%22%2C%22user%22%3A%22\" + email + \"%22%7D&req_text=\" + quote", "synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\" + voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce +", "\"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\" + voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce + 
\"%22%2C%22user%22%3A%22\" +", "= noncedata) nonce = noncer.text[10:50] synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\" + voiceid", "= ''.join(random.choice(letters) for i in range(64)) email = premail + \"@gmail.com\" noncerl =", "import string import urllib import random import json import sys def acapyla(quote, voicename=\"willfromafar\"):", "synthdata, headers = headers) minuspre = synthr.text[synthr.text.find('http://'):] minussuf = minuspre.split(\".mp3\", 1)[0] synthresult =", "+ voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce + \"%22%2C%22user%22%3A%22\" + email + \"%22%7D&req_text=\" +", "\"%22%2C%22user%22%3A%22\" + email + \"%22%7D&req_text=\" + quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers = {'content-type': 'application/x-www-form-urlencoded'}", "nonce = noncer.text[10:50] synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\" + voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\"", "+ \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce + \"%22%2C%22user%22%3A%22\" + email + \"%22%7D&req_text=\" + quote +", "'application/x-www-form-urlencoded'} synthr = requests.post(url = synthrl, data = synthdata, headers = headers) minuspre", "noncer.text[10:50] synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\" + voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" + nonce", "data = noncedata) nonce = noncer.text[10:50] synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\" +", "Path import requests import string import urllib import random import json import 
sys", "voiceid = \"enu_\" + voicename + \"_22k_ns.bvcu\" except IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters", "= noncer.text[10:50] synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\" synthdata = \"req_voice=\" + voiceid + \"&cl_pwd=&cl_vers=1-30&req_echo=ON&cl_login=AcapelaGroup&req_comment=%7B%22nonce%22%3A%22\" +", "= requests.post(url = noncerl, data = noncedata) nonce = noncer.text[10:50] synthrl = \"http://www.acapela-group.com:8080/webservices/1-34-01-Mobility/Synthesizer\"", "minussuf = minuspre.split(\".mp3\", 1)[0] synthresult = minussuf + \".mp3\" urllib.request.urlretrieve(synthresult, str(Path.home()) + \"/.dominae/out/tts/\"", "= minuspre.split(\".mp3\", 1)[0] synthresult = minussuf + \".mp3\" urllib.request.urlretrieve(synthresult, str(Path.home()) + \"/.dominae/out/tts/\" +", "= {'content-type': 'application/x-www-form-urlencoded'} synthr = requests.post(url = synthrl, data = synthdata, headers =", "requests import string import urllib import random import json import sys def acapyla(quote,", "+ \"_22k_ns.bvcu\" except IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase premail = ''.join(random.choice(letters)", "minuspre.split(\".mp3\", 1)[0] synthresult = minussuf + \".mp3\" urllib.request.urlretrieve(synthresult, str(Path.home()) + \"/.dominae/out/tts/\" + email[:8]", "= premail + \"@gmail.com\" noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email} noncer = requests.post(url", "+ nonce + \"%22%2C%22user%22%3A%22\" + email + \"%22%7D&req_text=\" + quote + \"&cl_env=ACAPELA_VOICES&prot_vers=2&cl_app=AcapelaGroup_WebDemo_Android\" headers", "headers) minuspre = synthr.text[synthr.text.find('http://'):] minussuf = minuspre.split(\".mp3\", 1)[0] synthresult = minussuf + \".mp3\"", "+ voicename + \"_22k_ns.bvcu\" except IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase premail", 
"\"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email} noncer = requests.post(url = noncerl, data = noncedata) nonce", "premail + \"@gmail.com\" noncerl = \"https://acapelavoices.acapela-group.com/index/getnonce/\" noncedata = {'googleid':email} noncer = requests.post(url =", "= requests.post(url = synthrl, data = synthdata, headers = headers) minuspre = synthr.text[synthr.text.find('http://'):]", "\"enu_\" + voicename + \"_22k_ns.bvcu\" except IndexError: voiceid = \"enu_willfromafar_22k_ns.bvcu\" letters = string.ascii_lowercase", "{'content-type': 'application/x-www-form-urlencoded'} synthr = requests.post(url = synthrl, data = synthdata, headers = headers)" ]
[ "coding: UTF-8 -*- import requests import json import datetime import pandas as pd", "#最後將list轉換成dataframe,並輸出成csv檔 # # information_df = pd.DataFrame(feeds, columns=['粉絲專頁', '發文內容', '發文時間']) # information_df.to_csv('Data Visualization Information.csv',", "default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # # information_df = pd.DataFrame(feeds, columns=['粉絲專頁', '發文內容', '發文時間']) #", "default = handleDate)) with open('feeds.json', 'w') as outfile: json.dump(feeds, outfile, indent=4, separators=(',', ':", "'), ensure_ascii=False, default = handleDate)) with open('feeds.json', 'w') as outfile: json.dump(feeds, outfile, indent=4,", "{'689157281218904':'台北技能交換'} feeds = [] for ele in group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while", "open('feeds.json', 'w') as outfile: json.dump(feeds, outfile, indent=4, separators=(',', ': '), ensure_ascii=False, default =", "information: feeds.append([group[ele], information['message'], parse(information['updated_time']).date(), information['id']]) res = requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4, separators=(',', ':", "x.day) token = '<KEY>' group = {'689157281218904':'台北技能交換'} feeds = [] for ele in", "#!/usr/bin/python # -*- coding: UTF-8 -*- import requests import json import datetime import", "indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate)) with open('feeds.json', 'w') as outfile:", "for information in res.json()['data']: if 'message' in information: feeds.append([group[ele], information['message'], parse(information['updated_time']).date(), information['id']]) res", "= requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate)) with", "handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # # information_df = pd.DataFrame(feeds, columns=['粉絲專頁', '發文內容', '發文時間']) # information_df.to_csv('Data Visualization", "res.json(): 
for information in res.json()['data']: if 'message' in information: feeds.append([group[ele], information['message'], parse(information['updated_time']).date(), information['id']])", "res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging' in res.json(): for information in res.json()['data']: if", "if isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year, x.month, x.day) token = '<KEY>' group = {'689157281218904':'台北技能交換'}", "from dateutil.parser import parse def handleDate(x): if isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year, x.month, x.day)", "token)) while 'paging' in res.json(): for information in res.json()['data']: if 'message' in information:", "res = requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate))", "while 'paging' in res.json(): for information in res.json()['data']: if 'message' in information: feeds.append([group[ele],", "in information: feeds.append([group[ele], information['message'], parse(information['updated_time']).date(), information['id']]) res = requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4, separators=(',',", "import requests import json import datetime import pandas as pd from dateutil.parser import", "for ele in group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging' in res.json(): for", "token = '<KEY>' group = {'689157281218904':'台北技能交換'} feeds = [] for ele in group:", "= [] for ele in group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging' in", "json.dump(feeds, outfile, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # #", "if 'message' in information: feeds.append([group[ele], information['message'], 
parse(information['updated_time']).date(), information['id']]) res = requests.get(res.json()['paging']['next']) # print(json.dumps(feeds,", "-*- import requests import json import datetime import pandas as pd from dateutil.parser", "outfile: json.dump(feeds, outfile, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔 #", "ensure_ascii=False, default = handleDate)) with open('feeds.json', 'w') as outfile: json.dump(feeds, outfile, indent=4, separators=(',',", "ensure_ascii=False, default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # # information_df = pd.DataFrame(feeds, columns=['粉絲專頁', '發文內容', '發文時間'])", "import pandas as pd from dateutil.parser import parse def handleDate(x): if isinstance(x, datetime.date):", "information['message'], parse(information['updated_time']).date(), information['id']]) res = requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4, separators=(',', ': '), ensure_ascii=False,", "json import datetime import pandas as pd from dateutil.parser import parse def handleDate(x):", "res.json()['data']: if 'message' in information: feeds.append([group[ele], information['message'], parse(information['updated_time']).date(), information['id']]) res = requests.get(res.json()['paging']['next']) #", "= {'689157281218904':'台北技能交換'} feeds = [] for ele in group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token))", "in res.json()['data']: if 'message' in information: feeds.append([group[ele], information['message'], parse(information['updated_time']).date(), information['id']]) res = requests.get(res.json()['paging']['next'])", "[] for ele in group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging' in res.json():", "dateutil.parser import parse def handleDate(x): if isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year, x.month, x.day) token", 
"information in res.json()['data']: if 'message' in information: feeds.append([group[ele], information['message'], parse(information['updated_time']).date(), information['id']]) res =", "information['id']]) res = requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4, separators=(',', ': '), ensure_ascii=False, default =", "indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # # information_df =", "'message' in information: feeds.append([group[ele], information['message'], parse(information['updated_time']).date(), information['id']]) res = requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4,", "separators=(',', ': '), ensure_ascii=False, default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # # information_df = pd.DataFrame(feeds,", "# -*- coding: UTF-8 -*- import requests import json import datetime import pandas", "# # information_df = pd.DataFrame(feeds, columns=['粉絲專頁', '發文內容', '發文時間']) # information_df.to_csv('Data Visualization Information.csv', index=False)", "import parse def handleDate(x): if isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year, x.month, x.day) token =", "print(json.dumps(feeds, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate)) with open('feeds.json', 'w') as", "': '), ensure_ascii=False, default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # # information_df = pd.DataFrame(feeds, columns=['粉絲專頁',", "in group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging' in res.json(): for information in", "requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate)) with open('feeds.json',", "<gh_stars>0 #!/usr/bin/python # -*- coding: UTF-8 -*- import requests import json import datetime", "= handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # # information_df = 
pd.DataFrame(feeds, columns=['粉絲專頁', '發文內容', '發文時間']) # information_df.to_csv('Data", "in res.json(): for information in res.json()['data']: if 'message' in information: feeds.append([group[ele], information['message'], parse(information['updated_time']).date(),", "with open('feeds.json', 'w') as outfile: json.dump(feeds, outfile, indent=4, separators=(',', ': '), ensure_ascii=False, default", "'), ensure_ascii=False, default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # # information_df = pd.DataFrame(feeds, columns=['粉絲專頁', '發文內容',", "feeds = [] for ele in group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging'", "= handleDate)) with open('feeds.json', 'w') as outfile: json.dump(feeds, outfile, indent=4, separators=(',', ': '),", "parse(information['updated_time']).date(), information['id']]) res = requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4, separators=(',', ': '), ensure_ascii=False, default", "as outfile: json.dump(feeds, outfile, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔", "requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging' in res.json(): for information in res.json()['data']: if 'message' in", "handleDate(x): if isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year, x.month, x.day) token = '<KEY>' group =", "separators=(',', ': '), ensure_ascii=False, default = handleDate)) with open('feeds.json', 'w') as outfile: json.dump(feeds,", "requests import json import datetime import pandas as pd from dateutil.parser import parse", "'<KEY>' group = {'689157281218904':'台北技能交換'} feeds = [] for ele in group: res =", "'paging' in res.json(): for information in res.json()['data']: if 'message' in information: feeds.append([group[ele], information['message'],", "x.month, x.day) token = '<KEY>' group = {'689157281218904':'台北技能交換'} 
feeds = [] for ele", "datetime import pandas as pd from dateutil.parser import parse def handleDate(x): if isinstance(x,", "pd from dateutil.parser import parse def handleDate(x): if isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year, x.month,", "group = {'689157281218904':'台北技能交換'} feeds = [] for ele in group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele,", "def handleDate(x): if isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year, x.month, x.day) token = '<KEY>' group", "as pd from dateutil.parser import parse def handleDate(x): if isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year,", "import datetime import pandas as pd from dateutil.parser import parse def handleDate(x): if", "parse def handleDate(x): if isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year, x.month, x.day) token = '<KEY>'", "'w') as outfile: json.dump(feeds, outfile, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate)", "': '), ensure_ascii=False, default = handleDate)) with open('feeds.json', 'w') as outfile: json.dump(feeds, outfile,", "return \"{}-{}-{}\".format(x.year, x.month, x.day) token = '<KEY>' group = {'689157281218904':'台北技能交換'} feeds = []", "UTF-8 -*- import requests import json import datetime import pandas as pd from", "# print(json.dumps(feeds, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate)) with open('feeds.json', 'w')", "outfile, indent=4, separators=(',', ': '), ensure_ascii=False, default = handleDate) #最後將list轉換成dataframe,並輸出成csv檔 # # information_df", "datetime.date): return \"{}-{}-{}\".format(x.year, x.month, x.day) token = '<KEY>' group = {'689157281218904':'台北技能交換'} feeds =", "= requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging' in res.json(): for information in res.json()['data']: if 'message'", "-*- coding: UTF-8 -*- import requests import json import 
datetime import pandas as", "group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging' in res.json(): for information in res.json()['data']:", "pandas as pd from dateutil.parser import parse def handleDate(x): if isinstance(x, datetime.date): return", "\"{}-{}-{}\".format(x.year, x.month, x.day) token = '<KEY>' group = {'689157281218904':'台北技能交換'} feeds = [] for", "isinstance(x, datetime.date): return \"{}-{}-{}\".format(x.year, x.month, x.day) token = '<KEY>' group = {'689157281218904':'台北技能交換'} feeds", "= '<KEY>' group = {'689157281218904':'台北技能交換'} feeds = [] for ele in group: res", "feeds.append([group[ele], information['message'], parse(information['updated_time']).date(), information['id']]) res = requests.get(res.json()['paging']['next']) # print(json.dumps(feeds, indent=4, separators=(',', ': '),", "import json import datetime import pandas as pd from dateutil.parser import parse def", "ele in group: res = requests.get('https://graph.facebook.com/v2.9/{}/feed?limit=100&access_token={}'.format(ele, token)) while 'paging' in res.json(): for information", "handleDate)) with open('feeds.json', 'w') as outfile: json.dump(feeds, outfile, indent=4, separators=(',', ': '), ensure_ascii=False," ]
[ "'mujoco': cnfg = mujoco_config() elif env_type == 'atari': cnfg = atari_config() else: cnfg", "noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16 ) def mujoco_config() -> Dict:", "str]: if env_type == 'mujoco': cnfg = mujoco_config() elif env_type == 'atari': cnfg", "vf_coef=0.5, ent_coef=0.0, bptt=8 ) def get_config(env_type: str) -> Tuple[Dict, str]: if env_type ==", "return dict( timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5,", "nsteps=32, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16 )", "def get_config(env_type: str) -> Tuple[Dict, str]: if env_type == 'mujoco': cnfg = mujoco_config()", "7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16 ) def mujoco_config() -> Dict: return dict( timesteps=1e6, nsteps=64,", "timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16", "-> Dict: return dict( timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x:", "str) -> Tuple[Dict, str]: if env_type == 'mujoco': cnfg = mujoco_config() elif env_type", "env_type == 'atari': cnfg = atari_config() else: cnfg = atari_config() return cnfg, env_type", "elif env_type == 'atari': cnfg = atari_config() else: cnfg = atari_config() return cnfg,", "dict( timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.0,", "-> Tuple[Dict, str]: if env_type == 'mujoco': cnfg = mujoco_config() elif env_type ==", "return dict( timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5,", "-> Dict: return dict( 
timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x:", "== 'mujoco': cnfg = mujoco_config() elif env_type == 'atari': cnfg = atari_config() else:", ") def get_config(env_type: str) -> Tuple[Dict, str]: if env_type == 'mujoco': cnfg =", "mujoco_config() elif env_type == 'atari': cnfg = atari_config() else: cnfg = atari_config() return", "cnfg = mujoco_config() elif env_type == 'atari': cnfg = atari_config() else: cnfg =", "lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8 ) def get_config(env_type: str)", "bptt=16 ) def mujoco_config() -> Dict: return dict( timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99, lam=0.95,", "= mujoco_config() elif env_type == 'atari': cnfg = atari_config() else: cnfg = atari_config()", "ent_coef=0.0, bptt=8 ) def get_config(env_type: str) -> Tuple[Dict, str]: if env_type == 'mujoco':", "if env_type == 'mujoco': cnfg = mujoco_config() elif env_type == 'atari': cnfg =", "env_type == 'mujoco': cnfg = mujoco_config() elif env_type == 'atari': cnfg = atari_config()", "noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8 ) def get_config(env_type: str) ->", "Dict: return dict( timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x,", "Tuple def atari_config() -> Dict: return dict( timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1,", "atari_config() -> Dict: return dict( timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda", "gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16 ) def mujoco_config()", "Dict, Tuple def atari_config() -> Dict: return dict( timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99, lam=0.95,", "nminibatches=1, 
gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8 ) def", "learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8 ) def get_config(env_type: str) -> Tuple[Dict, str]:", "ent_coef=0.01, bptt=16 ) def mujoco_config() -> Dict: return dict( timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99,", "typing import Dict, Tuple def atari_config() -> Dict: return dict( timesteps=10e6, nsteps=32, nminibatches=1,", "timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8", "vf_coef=0.5, ent_coef=0.01, bptt=16 ) def mujoco_config() -> Dict: return dict( timesteps=1e6, nsteps=64, nminibatches=1,", "nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16 ) def", "lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16 ) def mujoco_config() ->", "bptt=8 ) def get_config(env_type: str) -> Tuple[Dict, str]: if env_type == 'mujoco': cnfg", "def atari_config() -> Dict: return dict( timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5,", "x: 7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8 ) def get_config(env_type: str) -> Tuple[Dict, str]: if", "x: 7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16 ) def mujoco_config() -> Dict: return dict( timesteps=1e6,", "Dict: return dict( timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x,", "mujoco_config() -> Dict: return dict( timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda", "max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8 ) def get_config(env_type: str) -> Tuple[Dict,", "from typing import Dict, Tuple def 
atari_config() -> Dict: return dict( timesteps=10e6, nsteps=32,", "max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16 ) def mujoco_config() -> Dict: return", "<gh_stars>10-100 from typing import Dict, Tuple def atari_config() -> Dict: return dict( timesteps=10e6,", "gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8 ) def get_config(env_type:", "get_config(env_type: str) -> Tuple[Dict, str]: if env_type == 'mujoco': cnfg = mujoco_config() elif", "Tuple[Dict, str]: if env_type == 'mujoco': cnfg = mujoco_config() elif env_type == 'atari':", "nsteps=64, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8 )", "import Dict, Tuple def atari_config() -> Dict: return dict( timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99,", "7e-4*x, vf_coef=0.5, ent_coef=0.0, bptt=8 ) def get_config(env_type: str) -> Tuple[Dict, str]: if env_type", ") def mujoco_config() -> Dict: return dict( timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1,", "learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.01, bptt=16 ) def mujoco_config() -> Dict: return dict(", "def mujoco_config() -> Dict: return dict( timesteps=1e6, nsteps=64, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5,", "dict( timesteps=10e6, nsteps=32, nminibatches=1, gamma=0.99, lam=0.95, noptepochs=1, max_grad_norm=0.5, learning_rate=lambda x: 7e-4*x, vf_coef=0.5, ent_coef=0.01," ]
[ ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) \"\"\"Doc:", "metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"},", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: - ``p_ShareIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\"", "metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) 
\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags", "= \"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\"", "``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" rp_Type:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object Type", "``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\" p_ContainerPath:", "validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document:", "attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str = attr.ib( default=None,", "= \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\"", "\"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags\"\"\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] = attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "rp_NumNodes: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource", "\"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"},", "class SchedulingPolicy(Resource): \"\"\" AWS Object Type = 
\"AWS::Batch::SchedulingPolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str = attr.ib(", "metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"},", "= \"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Timeout\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags:", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str", "PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: -", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib(", "Property Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name:", "validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: 
typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property): \"\"\" AWS Object Type", "p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib( default=None,", "``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, )", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document:", "rp_Order: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class", "metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class JobDefinition(Resource): \"\"\" AWS Object Type =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration", "converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\"", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: - ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None,", "default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str = attr.ib(", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", 
"default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property): \"\"\" AWS", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str] =", "= attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources", "- ``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "- ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration -", "Object Type = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype -", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc:", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc:", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list,", "Property Document: - ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"},", "metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"},", "- ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration", "dict] = attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration',", "@attr.s class PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property", "\"Action\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, )", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] = attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list,", "validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict,", "Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE =", "\"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command:", "p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib( default=None, converter=PropSchedulingPolicyShareAttributes.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc:", "p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc:", "attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int = attr.ib(", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib( default=None,", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME:", "``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"},", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] = attr.ib(", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration:", "metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit),", "default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib( default=None,", "``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] =", "\"\"\" AWS_OBJECT_TYPE = 
\"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, )", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib( default=None,", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float = attr.ib(", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "\"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: 
\"SchedulingPolicyArn\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html", "Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int =", "AWS Object Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath", "converter=PropJobDefinitionDevice.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool = attr.ib( default=None,", "p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\"", "validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "GetAtt(resource=self, attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment\" Resource Document:", "\"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly:", "int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class PropJobDefinitionSecret(Property):", "\"InitProcessEnabled\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, )", "= attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), 
iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret',", "= \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order", "PropJobDefinitionVolumesHost(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.VolumesHost\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: -", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib(", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib(", "default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"},", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(", "validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str = 
attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME:", "- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties -", "AWS Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property): \"\"\" AWS Object", "Property Document: - ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId:", "- ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration -", "Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib( default=None,", "metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object Type =", "class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "class JobDefinition(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"},", "AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc:", "AWS Object Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid", "- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int = attr.ib( 
default=None, validator=attr.validators.instance_of(int),", "\"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam:", "dict]] = attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled:", "Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: - ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes -", "``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"},", "\"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, )", "converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib( default=None,", "- ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str =", "default=None, converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib(", "= attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str = attr.ib(", "Object Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: - ``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - 
``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, )", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property):", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, )", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), 
metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE", "\"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: - ``p_FairsharePolicy``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str", "default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: 
typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib( default=None,", "\"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, )", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property): \"\"\" AWS Object Type", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, )", "p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib( default=None, converter=PropJobDefinitionEvaluateOnExit.from_list,", "bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool =", "``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str = attr.ib(", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] =", ") \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice',", "\"ValueFrom\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\"", "class PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document:", "``p_Privileged``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``:", "typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged:", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str =", "= attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), 
iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class", "\"ComputeEnvironmentOrder\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, )", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property):", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: - ``rp_MainNode``:", "``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "@attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: - ``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``:", "``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``:", "Type = \"AWS::Batch::JobDefinition.Volumes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object Type", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int = attr.ib(", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc:", "default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib(", "- ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags:", "``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "- ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str =", "attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib(", "@attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), 
iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict] =", "\"PlatformCapabilities\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE =", "p_ComputeReservation: float = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\"", "\"Vcpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration:", "PropJobDefinitionEnvironment(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: -", "PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: -", "metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)),", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\"", "validator=attr.validators.optional(attr.validators.instance_of(float)), 
metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object Type", "Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE =", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib( default=None,", "p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: 
\"Volumes\"}, ) \"\"\"Doc:", "metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self) -> GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return", "- ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int),", "- ``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str =", "metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] = attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, )", "metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes),", "= attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Timeout\" Resource", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class 
PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\"", "\"AWS::Batch::JobDefinition.Secret\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus:", "validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int = attr.ib( default=None, validator=attr.validators.instance_of(int),", "Property Document: - ``p_ShareIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier:", "rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str", "\"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property): \"\"\" AWS 
Object Type = \"AWS::Batch::JobDefinition.VolumesHost\"", "rp_ImageType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str", "- ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits -", "typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - 
``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc:", "p_User: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int", "- ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\"", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), 
metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int = attr.ib( default=None,", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"},", "dict]] = attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), 
iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: - ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type -", "\"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: - ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair", "Property Document: - ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts:", "\"Order\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class PropJobDefinitionSecret(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Secret\"", "\"\"\" AWS 
Object Type = \"AWS::Batch::JobDefinition.Volumes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: - ``p_EfsVolumeConfiguration``:", "converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - 
``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE =", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str = attr.ib(", "= attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str =", "``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, )", "``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "Type = \"AWS::Batch::JobQueue\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``:", "\"AWS::Batch::JobDefinition.Timeout\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: - ``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\"", 
"typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib( default=None, converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "\"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\"", "default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), 
metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS", "metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"},", "typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource", "``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags:", "attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib(", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: - ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE =", "Type = \"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``:", "default=None, converter=PropSchedulingPolicyShareAttributes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\"", "validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, ) 
\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int = attr.ib( default=None, validator=attr.validators.instance_of(int),", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property):", "- ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration -", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: - ``p_AssignPublicIp``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: 
typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc:", "= attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str", "attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]]", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" 
p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc:", "\"LaunchTemplateId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, )", "p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment:", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, )", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) 
\"\"\"Doc:", "metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object", 
"@attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property", "\"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, )", "converter=PropSchedulingPolicyShareAttributes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib( default=None, converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"},", "PropJobDefinitionVolumes(Property): \"\"\" AWS Object Type = 
\"AWS::Batch::JobDefinition.Volumes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: -", "attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None,", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"},", "metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy:", "default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict = attr.ib( default=None,", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: - ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE =", "``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc:", "p_Attempts: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit',", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn", "``p_TransitEncryptionPort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "rp_Subnets: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type:", 
"class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\"", "Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole", "metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"},", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: - ``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions", "metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"},", "AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class PropJobDefinitionUlimit(Property): \"\"\" AWS", "PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]]", "p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\"", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, ) \"\"\"Doc:", "Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn", "``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE", "- ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags -", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str = attr.ib(", "``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "``p_Timeout``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str = attr.ib(", "p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\"", "attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: 
\"ComputeEnvironmentOrder\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int =", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "- ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus -", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE =", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc:", "p_Swappiness: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: 
typing.List[typing.Union['PropJobDefinitionTmpfs',", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s", "\"JobDefinitionName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: - ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\"", "\"MaxSwap\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), 
metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, )", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property): \"\"\" AWS Object Type", "metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object Type =", "..core.model import ( Property, Resource, Tag, GetAtt, TypeHint, TypeCheck, ) from ..core.constant import", "dict] = attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str", "PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: -", "\"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, )", "typing.Union['PropJobDefinitionTimeout', dict] = attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int = attr.ib( default=None,", "``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str = attr.ib(", "attr.ib( default=None, converter=PropSchedulingPolicyShareAttributes.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property):", "rp_MainNode: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty',", "default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None,", "typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: 
typing.Union['PropComputeEnvironmentLaunchTemplateSpecification',", "AWS Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: - ``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version:", "attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib(", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document:", "class PropJobDefinitionEnvironment(Property): \"\"\" AWS 
Object Type = \"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: - ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason -", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] =", "Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: - ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus -", "= \"AWS::Batch::JobDefinition.Device\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\"", "= attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str = attr.ib(", "p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration',", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib(", "- ``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory -", "Object Type = \"AWS::Batch::SchedulingPolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: - ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy -", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\"", "from ..core.constant import 
AttrMeta #--- Property declaration --- @attr.s class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS", "Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``:", "metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"},", "``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: - ``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc:", "``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self)", "metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"},", "\"FileSystemId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib( default=None, 
converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``:", "p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property): \"\"\"", "TypeHint, TypeCheck, ) from ..core.constant import AttrMeta #--- Property declaration --- @attr.s class", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, )", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None,", "``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``:", "AWS Object Type = \"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\"", "typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\"", "dict]] = attr.ib( 
default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits:", "p_ServiceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource): \"\"\" AWS", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "``p_LinuxParameters``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, )", "\"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: - ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\"", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object Type", "Object Type = \"AWS::Batch::JobDefinition.Timeout\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: - ``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\"", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] = attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict,", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] =", "= \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\"", 
"p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib( default=None, converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc:", "\"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME:", "- ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - ``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\"", "metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"},", "\"TransitEncryptionPort\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Device\"", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: - ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - ``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\"", "attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property): \"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc:", "- ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str = attr.ib(", "Document: - ``p_AttemptDurationSeconds``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId:", "default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: 
\"HostPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib(", "``p_ShareIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib(", "default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib( default=None,", "p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str", "dict] = attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int", "``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``:", "Property Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE", "metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"},", "= attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] =", "= \"AWS::Batch::JobDefinition.Secret\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom", "Property Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\"", "\"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, )", "= attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int = attr.ib(", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: - ``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html", "- ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str =", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict]", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property): \"\"\" AWS", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict]", "\"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: - ``rp_Image``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command -", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str = attr.ib( default=None,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource", "typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName:", "\"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: - ``p_ComputeReservation``:", "default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration --- @attr.s class", "- ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str =", "converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "- ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"},", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib(", 
"metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"},", "PropJobDefinitionTimeout(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Timeout\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: -", "\"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, )", "typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name:", "``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" 
AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str = attr.ib(", "p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Device\" Resource Document:", "default=None, converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str = attr.ib( default=None,", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] =", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property): \"\"\" AWS Object Type", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"},", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, )", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, )", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib(", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int =", "default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str", "@attr.s class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property", "\"\"\" AWS Object 
Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: - ``rp_LogDriver``:", "\"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size -", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property):", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME:", "\"NodeRangeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int = attr.ib( default=None, 
validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME:", "AWS Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: - ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts", 
"validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib( default=None,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "Property Document: - ``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``:", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: 
\"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict]", "p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] = attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc:", "- ``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus -", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]]", "dict] = attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, 
validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str", "@attr.s class PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property", "metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property): \"\"\" AWS Object Type =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str = attr.ib(", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\"", "- ``p_PlatformVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\"", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: - ``p_PlatformVersion``:", "metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"},", "- ``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str =", "\"\"\" AWS_OBJECT_TYPE = 
\"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))),", "Property Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType:", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, )", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict]", "default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class JobDefinition(Resource): \"\"\" AWS Object", "= attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class", "metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"},", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: - ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - ``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution", "- ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict]", "\"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties:", "- ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: -", "- ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s", "AWS Object Type = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype", "#--- Resource declaration --- @attr.s class JobQueue(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue\"", "\"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: 
typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"},", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib( default=None,", "- ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\"", "``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME:", "\"\"\" AWS Object 
Type = \"AWS::Batch::ComputeEnvironment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: - ``rp_Type``:", "@attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib( default=None, converter=PropSchedulingPolicyShareAttributes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, )", "Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str", "\"Ulimits\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, )", "\"\"\" AWS Object Type = 
\"AWS::Batch::JobDefinition.VolumesHost\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: - ``p_SourcePath``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str = attr.ib( default=None,", "Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: - ``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image -", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: - ``p_Name``:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int = attr.ib( default=None,", "``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Timeout\" Resource Document:", "\"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container:", "``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int = attr.ib(", "p_ReadOnly: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str", "``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``:", "declaration --- @attr.s class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"},", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: 
\"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool =", "metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource): \"\"\" AWS Object Type =", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str =", "Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE =", "JobDefinition(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: -", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str =", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str = attr.ib( default=None,", "converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\"", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, )", "converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, 
validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "- ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\"", "attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]]", "default=None, converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), 
iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\"", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int =", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict,", "Property, Resource, Tag, GetAtt, TypeHint, TypeCheck, ) from ..core.constant import AttrMeta #--- Property", "``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``:", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property): \"\"\"", "\"AWS::Batch::JobDefinition.VolumesHost\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: - ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\"", "metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s", "SchedulingPolicy(Resource): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: -", "TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property):", "AWS Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: - ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus", "``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath:", "class PropJobDefinitionDevice(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document:", "TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib( default=None,", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None,", "``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus:", ") \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc:", "metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"},", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str =", "TypeCheck, ) from ..core.constant import AttrMeta #--- Property declaration --- @attr.s class PropJobDefinitionAuthorizationConfig(Property):", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object Type", "metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "converter=PropJobDefinitionMountPoints.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib(", "\"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LogConfiguration\"", "Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: - ``p_PlatformVersion``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\"", "p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: - ``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class PropJobDefinitionLogConfiguration(Property): \"\"\" AWS", "Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath 
- ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags", "p_MinvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: - ``rp_TargetNodes``:", "\"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\"", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class 
PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "- ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "Property Document: - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name:", "p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib(", "metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "\"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self) -> GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self,", "default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: 
typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib( default=None,", "p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property): \"\"\" AWS Object Type =", "p_PropagateTags: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy',", "- ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict,", "\"MountOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\"", "from ..core.model import ( Property, Resource, Tag, GetAtt, TypeHint, TypeCheck, ) from ..core.constant", "- ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self) -> GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\")", "\"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: 
TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, )", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Image\"},", "metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"},", "- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str =", "PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: -", "= \"AWS::Batch::JobQueue\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority", "\"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: 
\"Memory\"}, )", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Device\" Resource", 
"\"Host\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, )", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property):", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged", "- ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE = 
\"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: - ``p_AccessPointId``:", "``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\"", 
"validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object", "\"AWS::Batch::SchedulingPolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: - ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name -", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "p_AttemptDurationSeconds: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib( default=None,", "``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib( default=None,", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: - ``rp_Name``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE =", "Property Document: - ``p_PlatformVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str = attr.ib(", "\"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, )", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float = attr.ib(", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, )", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: 
\"Timeout\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool = attr.ib( default=None,", "metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object Type =", "Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``:", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int =", "- ``rp_Priority``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state -", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document:", "Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE", "- ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints -", "converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "Property Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib(", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit", "attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None,", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource): \"\"\"", "Type = \"AWS::Batch::JobDefinition.Secret\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``:", "attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict = attr.ib(", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "int = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int =", "p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc:", "- ``p_ShareIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str =", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: - ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc:", "validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\"", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, 
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)),", "typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\"", "``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str = attr.ib(", "Property Document: - ``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE", "@attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str = attr.ib(", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc:", "\"MountPoints\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs:", "\"User\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport", "- ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements -", "metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"},", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib( default=None,", "attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib(", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None,", "p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\"", "int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), 
value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc:", "``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy:", "Type = \"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``:", "= \"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties", "\"Version\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.MountPoints\"", "\"ShareIdentifier\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, )", "class PropJobDefinitionVolumes(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Volumes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document:", "metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"},", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "Type = \"AWS::Batch::ComputeEnvironment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``:", "\"OnReason\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, )", "metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "= \"AWS::Batch::JobDefinition.VolumesHost\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: - ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE =", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\"", "p_PlatformCapabilities: 
typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags:", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, )", "\"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, )", "default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib( default=None,", "Document: - ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE =", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str = attr.ib(", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc:", "Type = \"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``:", 
"``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int = attr.ib(", "metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object Type =", "- ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\"", "\"ServiceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, )", "\"WeightFactor\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup", "Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc:", "PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object Type = 
\"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: -", "``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] =", "metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"},", "- ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str =", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property): \"\"\" AWS Object Type", "converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib( default=None,", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object Type", "\"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities", "``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\"", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder),", "default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict = attr.ib(", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, )", "Object Type = \"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name -", "\"TargetNodes\"}, ) 
\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME:", "- ``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int = attr.ib( default=None, validator=attr.validators.instance_of(int),", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "\"TransitEncryption\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, )", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int = attr.ib(", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class PropJobDefinitionUlimit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource Document:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - ``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float = attr.ib( default=None,", "- ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document:", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] =", "utf-8 -*- \"\"\" This module \"\"\" import attr import typing from ..core.model import", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property): \"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - ``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float", "\"InstanceType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, )", "metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"},", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object Type", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, )", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int = attr.ib( default=None,", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib(", "= \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\"", "p_HostPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str]", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration", "p_Vcpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes',", "p_OnStatusReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class", "\"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"},", "= \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: 
\"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\"", "\"ExecutionRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME:", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, )", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration --- @attr.s class JobQueue(Resource): \"\"\" AWS Object", "\"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object Type", "typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None,", "``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int = attr.ib(", "\"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: - ``rp_LogDriver``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize:", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list,", "\"RetryStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, )", "default=None, 
validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource): \"\"\" AWS Object", "= \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\"", "``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE", "AWS Object Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode", "p_State: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict", "\"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, )", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = attr.ib(", "p_OnReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str", "default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" 
p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib(", "- ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Volumes\" Resource", "@attr.s class ComputeEnvironment(Resource): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), 
metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\"", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: - ``p_Attempts``:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.VolumesHost\" Resource Document:", "\"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: - ``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled -", "class PropJobDefinitionMountPoints(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document:", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) \"\"\"Doc:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class PropJobDefinitionUlimit(Property): \"\"\"", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property): \"\"\" AWS Object", "Type = \"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``:", "typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters", "- ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid -", "@attr.s class JobQueue(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property", "class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object Type = 
\"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document:", "``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``:", "``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s 
class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document:", "``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``:", "metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object Type =", "\"Volumes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\"", "PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: -", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: - ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"},", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME:", "AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc:", "Document: 
- ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption", "\"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))),", "default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int = attr.ib( default=None,", "rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, 
) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] =", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason:", "metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration',", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object Type", "\"SourceVolume\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\"", "= \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties", "validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list,", "``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "= \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: - ``rp_HardLimit``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources:", "``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE", "\"LogDriver\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, )", "metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object Type =", "metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"},", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME:", "p_JobQueueName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "\"SecurityGroupIds\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, )", "validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list,", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property): \"\"\"", "@attr.s class PropJobDefinitionUlimit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class JobDefinition(Resource): \"\"\"", "AWS Object Type = \"AWS::Batch::JobQueue\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS", "- ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\"", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float =", "``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property): \"\"\"", "p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\"", "\"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, )", "- ``p_SecurityGroupIds``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\"", "- ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets -", "p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class", "@attr.s class PropJobDefinitionVolumesHost(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.VolumesHost\" Resource 
Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\"", "- ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "\"AWS::Batch::JobDefinition.MountPoints\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - 
``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly -", "float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]]", "dict] = attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib(", "= \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\"", "TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc:", "default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property): \"\"\"", "\"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc:", "metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str = 
attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"},", "@attr.s class PropJobDefinitionTimeout(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Timeout\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property", "Document: - ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib( default=None, converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), 
metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc:", "= \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\"", "validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "class PropJobDefinitionUlimit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document:", "\"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, )", "dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource):", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str = attr.ib( 
default=None,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc:", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int = attr.ib( default=None,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"},", "- ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int =", "\"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" 
rp_Size: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, )", "``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict,", "- ``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration -", "Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements:", "default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] =", "``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``:", "``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib( default=None, converter=PropSchedulingPolicyShareAttributes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), 
iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "= \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: - ``p_PlatformVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment\"", "Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: - ``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``:", "= attr.ib( default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict]", "iterable_validator=attr.validators.instance_of(list))), 
metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``:", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, )", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict,", "metadata={AttrMeta.PROPERTY_NAME: 
\"NodeRangeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"},", "``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout:", "\"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: - ``rp_Action``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode -", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class JobDefinition(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition\" Resource Document:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\"", "``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib(", "- ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\"", "\"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, )", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)),", "\"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"},", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), 
metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int = attr.ib( default=None, validator=attr.validators.instance_of(int),", "\"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy\"", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self) ->", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int = attr.ib( default=None, validator=attr.validators.instance_of(int),", "converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib( 
default=None,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, )", "# -*- coding: utf-8 -*- \"\"\" This module \"\"\" import attr import typing", "\"Environment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration", "\"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, )", "p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float", "AWS Object Type = \"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] = attr.ib(", "rv_Arn(self) -> GetAtt: \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource): \"\"\" AWS", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str =", "Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: - ``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``:", "Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: - ``p_ShareIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier -", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict]", "default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, )", "class PropJobDefinitionTmpfs(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document:", "= attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), 
iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str =", "Object Type = \"AWS::Batch::JobDefinition.Volumes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration -", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object", "attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property):", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: 
\"ContainerPath\"}, ) \"\"\"Doc:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] = attr.ib( default=None,", "= \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc:", "metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty),", "- ``p_MinvCpus``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole -", "= \"AWS::Batch::ComputeEnvironment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", ") \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, )", "- ``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict = attr.ib(", "PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: -", "= attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str =", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float = attr.ib( default=None,", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: TypeHint.intrinsic_str = attr.ib(", "bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]]", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str = attr.ib( default=None,", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\"", "\"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, )", "\"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename -", "p_InstanceType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(dict)),", "-*- coding: utf-8 -*- \"\"\" This module \"\"\" import attr import typing from", "metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object Type =", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int = attr.ib( default=None,", "typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html", "\"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: - ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\"", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, )", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Secret\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: - ``rp_Name``:", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties:", "- ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness -", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str),", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class PropJobDefinitionUlimit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource", "AWS Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: - ``p_Devices``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices", "bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict]", "= \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\"", "default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)),", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib( default=None,", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property): \"\"\" AWS Object Type", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object Type = 
\"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource", "- ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] =", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: 
TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\"", "p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\"", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"},", "\"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)),", "This module \"\"\" import attr import typing from ..core.model import ( Property, Resource,", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)),", "- ``p_TransitEncryptionPort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "p_LaunchTemplateName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str", "dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: 
typing.List[typing.Union['PropJobDefinitionSecret', dict]]", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: - ``rp_Image``:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" 
p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\"", "``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: - ``p_ShareIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE", "\"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] = attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, )", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc:", "\"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: - ``p_PlatformVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\"", "- ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: - ``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"},", "Type = \"AWS::Batch::JobDefinition.Timeout\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: - ``p_AttemptDurationSeconds``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE", "typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\"", "p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\"", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib( default=None, 
converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, )", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, )", "@attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property", "= \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: - ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode", "\"Ec2KeyPair\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, )", "PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE =", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: - ``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int =", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), 
metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\"", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: - ``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds:", "= attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\"", "AWS Object Type = \"AWS::Batch::SchedulingPolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: - ``p_FairsharePolicy``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\"", "AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc:", "\"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] = attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME:", "Document: - ``p_Attempts``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "p_Name: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str", "metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"},", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str =", "p_SourceVolume: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class", "``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: 
typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] =", "``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str = attr.ib(", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib(", "metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"},", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE =", "``p_NetworkConfiguration``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``:", "metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object Type =", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, )", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE =", "default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib( default=None,", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object", "= \"AWS::Batch::JobDefinition.MountPoints\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly", "rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int", "metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage", "metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)),", "Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``:", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE", "- ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes -", "\"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: - ``p_LaunchTemplateId``:", "``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``:", "\"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: - ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets -", "- ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn -", "Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: - ``p_ShareIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``:", "\"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))),", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool = attr.ib( 
default=None,", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str =", "--- @attr.s class JobQueue(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.VolumesHost\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict = attr.ib(", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) \"\"\"Doc:", "\"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document:", "p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, )", "``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - ``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float = attr.ib(", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"},", "``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action:", "p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\"", "\"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName:", "p_ComputeResources: 
typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str = attr.ib(", "p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole:", "class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document:", "metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" 
@attr.s class PropJobDefinitionSecret(Property): \"\"\" AWS Object Type =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource", "``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver:", "dict] = attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str = attr.ib( default=None,", "PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: -", "typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory:", "``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str = attr.ib(", "validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class PropJobDefinitionSecret(Property): \"\"\" AWS Object Type", "\"AWS::Batch::JobDefinition.Device\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath:", "validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list,", "@attr.s class SchedulingPolicy(Resource): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str =", "Object Type = \"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath -", "\"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\"", "\"NodeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: - ``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap -", "= attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration ---", "Document: - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"},", "``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: - ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\"", "default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property): \"\"\" AWS Object", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Tmpfs\" Resource Document:", "Type = \"AWS::Batch::JobDefinition.VolumesHost\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: - ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)),", "rp_MaxvCpus: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str]", "= \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: - ``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State:", "int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class PropJobDefinitionFargatePlatformConfiguration(Property):", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\"", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, )", "dict] = attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict", "Tag, GetAtt, TypeHint, TypeCheck, ) from ..core.constant import AttrMeta #--- Property declaration ---", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource 
Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html", "metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)),", "``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\"", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None,", "attr.ib( 
default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class PropJobDefinitionSecret(Property): \"\"\" AWS", "\"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))),", "``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId:", "dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class 
JobDefinition(Resource):", "default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool = attr.ib( default=None,", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"},", "\"HostPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"},", "metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object Type =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc:", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes", "typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\"", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str = attr.ib( default=None,", "\"MinvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, )", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class", ") \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\"", "class PropJobDefinitionVolumesHost(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.VolumesHost\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document:", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int =", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"},", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Timeout\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: - ``p_AttemptDurationSeconds``:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "``p_LaunchTemplateName``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib(", "\"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, )", "Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document:", "class PropJobDefinitionTimeout(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Timeout\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Environment\" Resource Document:", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``:", "default=None, converter=PropJobDefinitionMountPoints.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] =", "= \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: - ``p_ComputeReservation``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds", "float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property):", "class PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document:", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: 
typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "- ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "@attr.s class PropJobDefinitionVolumes(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Volumes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets", "rp_HardLimit: int = attr.ib( 
default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str", "\"\"\" This module \"\"\" import attr import typing from ..core.model import ( Property,", "attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property):", "import attr import typing from ..core.model import ( Property, Resource, Tag, GetAtt, TypeHint,", "= \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\"", "converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib(", "default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"},", "``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "= attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, 
validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str =", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] = attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, ) \"\"\"Doc:", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property):", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, 
) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "rp_Image: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str]", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict = attr.ib( default=None,", "Property Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s", "- ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str =", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) 
\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #---", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc:", "Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment -", "validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)),", "- ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness:", "typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)),", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: - ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict] =", "- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str =", "validator=attr.validators.optional(attr.validators.instance_of(int)), 
metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME:", "class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document:", "``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str = attr.ib(", "converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS", "metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"},", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html", "rp_ValueFrom: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class", "attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict]", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``:", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), 
metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions:", "dict]] = attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User:", "rp_Priority: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, )", 
"p_ShareDecaySeconds: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes',", "AWS Object Type = \"AWS::Batch::JobDefinition.Volumes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: - ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property): \"\"\"", "Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: - 
``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``:", "import ( Property, Resource, Tag, GetAtt, TypeHint, TypeCheck, ) from ..core.constant import AttrMeta", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, )", "\"Memory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))),", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str = attr.ib(", "= \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"},", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc:", "\"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: - ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds -", 
"\"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))),", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS", "\"AccessPointId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, )", "dict] = attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property): \"\"\" AWS", "p_InstanceRole: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str]", "default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib(", "converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property): \"\"\" AWS", "= \"AWS::Batch::SchedulingPolicy\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: - ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str =", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str = attr.ib( default=None,", "PropJobDefinitionSecret(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Secret\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: -", "``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib( 
default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc:", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict,", "default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib( default=None,", "- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib( default=None,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"},", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc:", "\"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, )", "metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"},", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"},", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Environment\" Resource", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: - 
``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str", "metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc:", "\"\"\" import attr import typing from ..core.model import ( Property, Resource, Tag, GetAtt,", "``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] =", "PropJobDefinitionDevice(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: 
-", "p_JobRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters',", "--- @attr.s class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html", "Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE =", "converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str = attr.ib( default=None,", "= \"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value", "\"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\"", "Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"},", 
"attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib(", "typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name:", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, )", "- ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities -", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]]", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object Type", "int = attr.ib( default=None, validator=attr.validators.instance_of(int), 
metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]]", "metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"},", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc:", "Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: - ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``:", "\"AuthorizationConfig\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: 
\"RootDirectory\"}, )", "default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str = attr.ib( default=None,", "``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - ``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions:", "metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement),", "rp_Action: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: - ``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment", "typing from ..core.model import ( Property, Resource, Tag, GetAtt, TypeHint, TypeCheck, ) from", "default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] =", "@attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property", "rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str", "-> GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource): \"\"\" AWS 
Object", "``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``:", "``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``:", "Document: - ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - 
``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE =", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int = attr.ib(", "\"Swappiness\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] = attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))),", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource): \"\"\" AWS Object Type =", "attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), 
metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int =", "= \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\"", "p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] = attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\"", "Object Type = \"AWS::Batch::JobQueue\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder -", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: - ``rp_Type``:", "\"ComputeEnvironmentName\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str", "float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float =", "default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int = attr.ib(", "= \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: 
\"Tags\"},", "typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource Document:", "rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties',", "metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"},", "metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"},", "p_Version: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class", "= \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: - ``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str = attr.ib( default=None,", "metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" 
p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"},", "= attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration',", "= \"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name", "\"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties:", "``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "= attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str] = attr.ib(", "PropJobDefinitionMountPoints(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: -", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc:", "\"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: - ``rp_ImageType``:", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] =", "bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int =", "metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"},", "= \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)),", "converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int = attr.ib( default=None,", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME:", "typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration:", "- ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\"", "\"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" 
p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME:", "metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"},", "rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes", "iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "p_AccessPointId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"},", "typing.List[typing.Union['PropJobDefinitionSecret', dict]] = 
attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\"", "Type = \"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``:", "= \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\"", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib( default=None,", "default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property): \"\"\" AWS Object Type =", "p_DesiredvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject',", "p_BidPercentage: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "\"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, )", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] =", "declaration --- @attr.s class JobQueue(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue\" Resource Document:", "= attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration --- @attr.s class JobQueue(Resource): \"\"\"", "Property declaration --- @attr.s class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, )", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class PropJobDefinitionUlimit(Property): \"\"\" AWS Object", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: 
\"ReadOnly\"}, ) \"\"\"Doc:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int = attr.ib( default=None,", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``:", "typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType:", "dict]] = attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" 
p_ExecutionRoleArn:", "- ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "\"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object Type", "``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE = 
\"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode:", "p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources',", "``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "\"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value:", "- ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption -", "- ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy -", "= \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\"", "AWS Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: - ``p_PlatformVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None,", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property): \"\"\" AWS 
Object", "- ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\"", "Property Document: - ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib( default=None,", "= attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property):", "metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"},", "= \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\"", "\"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib( default=None, 
converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap:", "@attr.s class PropJobDefinitionEnvironment(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property", "attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] =", "\"PlacementGroup\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib(", "\"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class JobDefinition(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition\"", "Object Type = \"AWS::Batch::ComputeEnvironment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\"", 
"default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "\"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, )", "= attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int", "\"SoftLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\"", "p_ReadonlyRootFilesystem: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement',", "``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", "p_ImageId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions", "``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int = attr.ib(", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object Type", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str = attr.ib( default=None,", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\"", "default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object", "- ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state -", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),", "``p_User``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html", "default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool = attr.ib( default=None,", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str =", "= 
\"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\"", "rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, ) \"\"\"Doc:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict,", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME:", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: - ``rp_Action``:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc:", "= \"AWS::Batch::JobDefinition.Volumes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object Type", "= \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int = 
attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\"", "- ``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str = attr.ib(", "\"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Environment\"", "AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"},", "Type = 
\"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions:", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list,", "- ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str =", "Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: - ``p_PlatformVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str", "\"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" 
rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"},", "ComputeEnvironment(Resource): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: -", "default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None,", "\"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\"", "dict]] = attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s", "AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc:", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str = attr.ib(", "attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool =", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list,", "validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object Type", "rp_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int", "p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\"", "default=None, converter=PropJobDefinitionDevice.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool = attr.ib(", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int = attr.ib(", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int = attr.ib( default=None,", "``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE", "``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE", "= attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int =", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int =", "p_TransitEncryption: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int", "- ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\" p_ContainerPath: 
TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None,", "- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib( default=None,", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None,", "p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class", "- ``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - 
``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\"", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] =", "default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self) -> GetAtt:", "= \"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size", "= attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, 
validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc:", "``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str = attr.ib(", "- ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str =", "default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), 
iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\"", "@attr.s class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property", "- ``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup -", "Property Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment:", 
"int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]]", "Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``:", "metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\"", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str =", "AWS Object Type = \"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name", "- ``p_Memory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib(", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME:", "- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int =", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Volumes\" Resource Document:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib(", "``p_ExecutionRoleArn``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``:", "- ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn -", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc:", "\"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document:", "``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``:", "- ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority -", "rp_FileSystemId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: 
\"FileSystemId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig',", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\"", "PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: -", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), 
metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\"", "= \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, )", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: - ``rp_FileSystemId``:", "p_Privileged: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool", "Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: - ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts -", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, )", "converter=PropJobDefinitionVolumesHost.from_dict, 
validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "``p_Tmpfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib( default=None, converter=PropJobDefinitionDevice.from_list,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\" p_ContainerPath: TypeHint.intrinsic_str", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc:", "metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property): \"\"\" AWS Object Type =", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\"", "= attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int =", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property): \"\"\"", "class PropJobDefinitionSecret(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Secret\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document:", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib(", "- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib( default=None,", "metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"},", "int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: 
typing.List[TypeHint.intrinsic_str] =", "- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus -", "= attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int", "dict] = attr.ib( default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost',", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float = attr.ib( default=None,", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume:", "= attr.ib( default=None, converter=PropSchedulingPolicyShareAttributes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class", "= attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters', dict] =", "attr.ib( default=None, converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property):", "= \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: - ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container", "\"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: - ``p_ShareIdentifier``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\"", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "- ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: - ``p_ContainerPath``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\"", "p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\"", "= attr.ib( default=None, converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class", "``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``:", "Document: - ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str", "- ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\"", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: - ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - ``p_ShareDistribution``:", "AWS Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: - ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes", "metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)),", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: - ``p_AccessPointId``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE", "\"SecretOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\"", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict]", "p_SchedulingPriority: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout',", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, )", ") \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy\" Resource Document:", "p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc:", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, )", "TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int =", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int = attr.ib( default=None,", "- ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str =", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib( default=None, 
converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"},", "\"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit:", "@attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property", "default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE = 
\"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property): \"\"\" AWS", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib( default=None,", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]]", "\"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str = attr.ib( default=None,", "typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\"", "Document: - ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy", "Object Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode -", "- ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str =", "``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib( default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None,", "dict]] = attr.ib( default=None, converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s", "p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"},", "``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: 
TypeHint.intrinsic_str = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy - ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: 
TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, ) \"\"\"Doc:", "class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document:", "\"\"\" AWS Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: - ``rp_ComputeEnvironment``:", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int = attr.ib( default=None,", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\"", "converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict = 
attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)),", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict]", "metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"},", "\"ReadOnly\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, )", "\"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] =", "Document: - ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str = attr.ib( default=None,", "- ``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "\"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Timeout\"", "\"MaxvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: 
typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"},", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object Type", "Object Type = \"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type -", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class PropJobDefinitionSecret(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Secret\" Resource Document:", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, )", "= \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret),", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property): \"\"\" AWS", "metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib( default=None, converter=PropSchedulingPolicyShareAttributes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags:", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: - ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``:", "= attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict]", "- ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - 
``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids -", "``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib(", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict,", "p_Iam: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class", "\"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Tmpfs\"", "\"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem", "\"AWS::Batch::JobDefinition.Volumes\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host -", "typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\"", "- ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s", "Object Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: - ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid -", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib(", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict = attr.ib( default=None,", "metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object Type =", "- ``p_RootDirectory``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\"", "``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters - ``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``:", "@attr.s class JobDefinition(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s", "AWS Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: - ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags", "rp_SoftLimit: int = 
attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class", "attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration\"\"\" p_Ec2KeyPair: TypeHint.intrinsic_str =", "- ``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize -", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\" @attr.s class PropJobDefinitionEnvironment(Property): \"\"\" AWS 
Object Type = \"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html", "AWS Object Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit", "AWS Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: - ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s", "class PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document:", "GetAtt, TypeHint, TypeCheck, ) from ..core.constant import AttrMeta #--- Property declaration --- @attr.s", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str", "dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property):", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume", "- ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS", "p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties',", "class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document:", "validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME:", "``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int = attr.ib(", "Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: - ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``:", "p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class", "@attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] =", "converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), 
metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] = attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, )", "\"AWS::Batch::ComputeEnvironment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname -", "metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"},", "-*- \"\"\" This module \"\"\" import attr import typing from ..core.model import (", ") \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc:", "\"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject),", "p_PlacementGroup: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str]", "- ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout -", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int = attr.ib(", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: - ``p_PlatformVersion``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str] =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn:", "``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``:", "p_PlatformVersion: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class", "typing.Dict[str, TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc:", "- ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\"", "\"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: - ``rp_MaxvCpus``:", "typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) 
\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib( default=None,", "dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: - ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type", "Property Document: - ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str = attr.ib(", "\"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\"", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str =", "validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration --- @attr.s class JobQueue(Resource):", "- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict = attr.ib(", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool =", "typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib( default=None, converter=PropSchedulingPolicyShareAttributes.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\"", "validator=attr.validators.optional(attr.validators.instance_of(dict)), 
metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object Type", "- ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"},", "Document: - 
``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize", "- ``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole -", "\"LaunchTemplate\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int = 
attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, )", "metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"},", "metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"},", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None,", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] =", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: - ``p_ShareIdentifier``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE =", "- ``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn -", "Property Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``:", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: - ``p_Type``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy',", "metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME:", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: - ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags", "Property Document: - ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s", "Object Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid -", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str = attr.ib( default=None,", "metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE =", "\"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName:", "AWS Object Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: - ``rp_LogDriver``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"},", "= \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: - ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib( default=None, 
converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration --- @attr.s class JobQueue(Resource): \"\"\" AWS Object Type =", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object", "converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib( default=None,", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: - ``rp_ContainerPath``:", "\"Size\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int = attr.ib( default=None,", "\"OnExitCode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, )", "- ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LinuxParameters\"", "- ``p_Vcpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus - ``p_Volumes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ContainerProperties\" rp_Image: TypeHint.intrinsic_str =", "@attr.s class PropJobDefinitionMountPoints(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole", "metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\" p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"},", "Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: - ``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE =", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list,", "``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class PropJobDefinitionLogConfiguration(Property): \"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str = attr.ib( default=None,", "converter=PropJobDefinitionEvaluateOnExit.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS", "class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document:", "``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user - ``p_Vcpus``:", "= \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: - ``rp_MaxvCpus``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets", "\"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME:", "``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "Object Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath -", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict =", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object", "\"AllocationStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, )", "``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - 
``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder:", "metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration: typing.Union['PropJobDefinitionNetworkConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)),", "default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int = attr.ib( default=None,", "\"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds:", "Property Document: - ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``:", "\"BidPercentage\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, )", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"},", "- ``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int =", "p_SharedMemorySize: int = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SharedMemorySize\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int", "``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib(", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources -", 
"Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: - ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``:", "\"NumNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration --- @attr.s class JobQueue(Resource): \"\"\" AWS", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource Document:", "Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE =", "- ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name - ``p_Tags``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict]", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class PropJobDefinitionSecret(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Secret\" Resource", "``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE", "metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME:", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property):", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole:", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: - ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``:", "dict]] = attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes:", "class JobQueue(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document:", "- ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\"", "``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Tmpfs\" Resource", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int", "AWS Object Type = \"AWS::Batch::JobDefinition.VolumesHost\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: - ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource", ") \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource", "= \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\"", "attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration --- @attr.s", "Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: - ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"},", "\"Timeout\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, )", "= attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str", "= attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str =", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property): \"\"\" AWS Object", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc:", "( Property, Resource, Tag, GetAtt, TypeHint, TypeCheck, ) from ..core.constant import AttrMeta #---", "``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource", "p_InitProcessEnabled: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), 
metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int", "metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object Type =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"},", "Document: - ``p_PlatformVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str = attr.ib( default=None,", "``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``:", "``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters - ``p_PlatformCapabilities``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities - ``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``:", "``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib(", "= attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int =", 
"AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc:", "typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str", "PropJobDefinitionTmpfs(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: -", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\"", "- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", 
"validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource): \"\"\" AWS Object Type", "dict]] = attr.ib( default=None, converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"EvaluateOnExit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, )", "AWS Object Type = \"AWS::Batch::JobDefinition.Secret\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name", "- ``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE = 
\"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]]", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s", "typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] = attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment\" 
rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"},", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property): \"\"\" AWS Object", "``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, )", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ResourceRequirement\" Resource", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: 
\"Swappiness\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] = attr.ib( default=None,", "iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"NodeRangeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc:", "= attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict =", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = 
attr.ib( default=None,", "\"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: - ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"},", "metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] = attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value 
\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\"", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.MountPoints\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: - ``p_ContainerPath``:", "AWS Object Type = \"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type", "metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)),", "GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource): \"\"\" AWS Object Type", "= \"AWS::Batch::JobDefinition.Environment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value", 
"converter=PropJobDefinitionVolumes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property): \"\"\" AWS", "\"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, )", "= \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\"", "default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)),", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str = attr.ib( default=None,", "default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource", "converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\"", "\"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, )", "\"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties -", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict = attr.ib( default=None,", "PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: -", "\"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document:", "p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc:", "p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict, 
validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\"", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, )", "\"ContainerProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, )", "attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename -", "Type = \"AWS::Batch::SchedulingPolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html Property Document: - ``p_FairsharePolicy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy - ``p_Name``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None,", "Property 
Document: - ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation - ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - ``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class PropJobDefinitionLogConfiguration(Property):", "p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc:", "- ``p_BidPercentage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage - ``p_DesiredvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - 
``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair -", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib( default=None,", "\"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s", "default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib(", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str = attr.ib( default=None,", "\"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, )", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc:", "``p_PlatformVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "default=None, 
validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\" #--- Resource declaration --- @attr.s class JobQueue(Resource): \"\"\" AWS Object Type", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object Type =", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "module \"\"\" import attr import typing from ..core.model import ( Property, Resource, Tag,", "validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME:", "Object Type = \"AWS::Batch::JobDefinition.Secret\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property Document: - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name -", "\"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME:", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) \"\"\"Doc:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc:", "default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None,", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type - ``p_ContainerProperties``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``:", "metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object Type =", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property): \"\"\" AWS Object", "p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str", "attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class 
PropComputeEnvironmentLaunchTemplateSpecification(Property):", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy\" p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"},", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobQueueName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename\"\"\"", "- ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\"", "\"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), 
metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, )", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str = attr.ib(", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: - ``p_Devices``:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] = attr.ib( default=None, converter=PropJobDefinitionNodeRangeProperty.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionNodeRangeProperty), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME:", "AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc:", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html Property Document: - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value \"\"\" AWS_OBJECT_TYPE", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"},", "dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: 
TypeHint.intrinsic_str", "metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object Type =", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionVolumes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Volumes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object", "\"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig:", "PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: -", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc:", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME:", "metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion\"\"\" @attr.s class PropJobDefinitionTimeout(Property): \"\"\" AWS Object Type =", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "default=None, 
validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib( default=None,", "p_Options: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Options\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options\"\"\" p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret',", "class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document:", "``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId:", 
"metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled\"\"\" p_MaxSwap: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self) -> GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\") @attr.s class", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: - ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib( default=None, 
converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)),", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type\"\"\" p_AllocationStrategy: TypeHint.intrinsic_str = attr.ib( default=None,", "metadata={AttrMeta.PROPERTY_NAME: \"InstanceTypes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)),", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``:", "Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: - ``p_PlatformVersion``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]]", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"},", "= \"AWS::Batch::JobDefinition.Volumes\" p_EfsVolumeConfiguration: typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: 
\"EfsVolumeConfiguration\"}, )", "= \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\"", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int = attr.ib(", "value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self) -> GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"},", "dict] = attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str", "p_FairsharePolicy: typing.Union['PropSchedulingPolicyFairsharePolicy', dict] = attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"},", "p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class", "p_Type: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str", "= \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: - ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit", "= \"AWS::Batch::JobDefinition.Timeout\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: - ``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE =", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib(", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] =", "\"\"\" AWS Object Type = \"AWS::Batch::JobQueue\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: - ``rp_ComputeEnvironmentOrder``:", "typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters:", "dict] = attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc:", "Document: - ``p_ShareIdentifier``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" p_ShareIdentifier: TypeHint.intrinsic_str", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: - ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, )", "- ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets - ``rp_Type``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-type - ``p_AllocationStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy -", "attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int = attr.ib(", "validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME:", "``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath:", "attr.ib( default=None, 
converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str =", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME: \"Host\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "p_TransitEncryptionPort: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, ) \"\"\"Doc:", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self) -> GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\") @attr.s", "Resource, Tag, GetAtt, TypeHint, TypeCheck, ) from ..core.constant import AttrMeta #--- Property declaration", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume\"\"\" @attr.s class PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize\"\"\" p_Swappiness: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration\"\"\" p_Memory: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"},", "metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"},", "default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), 
metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str = attr.ib( default=None,", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath\"\"\" p_ReadOnly: bool = attr.ib(", "typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment',", "p_SchedulingPolicyArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: TypeHint.intrinsic_str", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) 
\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool = attr.ib(", "p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc:", "@attr.s class PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, )", "= \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\"", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class JobDefinition(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition\" Resource", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc:", "rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters', dict]", "class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object Type = 
\"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document:", "converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib(", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str = attr.ib( default=None,", "default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib(", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: 
TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc:", "\"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: - ``p_ShareIdentifier``:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document:", "\"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath -", "\"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, )", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] 
= attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"},", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort: int = attr.ib(", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"},", "attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str = attr.ib( default=None,", "\"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name -", "@attr.s class PropJobDefinitionTmpfs(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict,", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: 
TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class PropJobDefinitionUlimit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str =", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str]", "AWS Object Type = 
\"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: - ``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath", "AttrMeta #--- Property declaration --- @attr.s class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object Type =", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"DesiredvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\"", "AWS Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html Property Document: - ``rp_ComputeEnvironment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc:", "dict] = attr.ib( default=None, 
converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\"", "\"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib( default=None, converter=PropSchedulingPolicyShareAttributes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))),", "\"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size:", "Document: - ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason - ``p_OnStatusReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder',", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes:", "default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib(", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``:", "Type = \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE", "= 
attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountOptions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions\"\"\" @attr.s class PropJobDefinitionEfsVolumeConfiguration(Property):", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: TypeHint.intrinsic_str =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions\"\"\" @attr.s class PropComputeEnvironmentLaunchTemplateSpecification(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" Resource", "default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class PropJobDefinitionSecret(Property): \"\"\" AWS Object", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class PropJobDefinitionSecret(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Secret\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html Property Document: - ``p_EfsVolumeConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration - ``p_Host``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-host - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name", "dict]] = attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\" p_NetworkConfiguration:", "\"Subnets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, )", "Property Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE", "PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: -", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Version\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version\"\"\" @attr.s class PropJobDefinitionMountPoints(Property):", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\" p_OnStatusReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc:", "validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib( default=None, 
converter=PropSchedulingPolicyShareAttributes.from_list,", "metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type\"\"\" p_ContainerProperties: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)),", "Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE =", "\"Iam\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ResourceRequirement\"", "metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"},", "p_RootDirectory: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str", "``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``:", "p_MaxSwap: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MaxSwap\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap\"\"\" p_SharedMemorySize: int", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"},", 
"p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int", "= \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\"", "converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourceVolume\"}, ) \"\"\"Doc:", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: - ``rp_TargetNodes``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem - ``p_ResourceRequirements``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements", "int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str =", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath\"\"\" p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc:", "\"LinuxParameters\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged\"\"\" p_ReadonlyRootFilesystem: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)),", "metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration\"\"\" p_InstanceType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"},", "metadata={AttrMeta.PROPERTY_NAME: \"ImageIdOverride\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property): \"\"\" AWS Object Type =", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME:", "- ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\"", "rp_LogDriver: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str =", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib(", "default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "\"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, )", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property Document: - ``rp_MaxvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus - ``rp_Subnets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets 
- ``rp_Type``:", "``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name - ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str = attr.ib(", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath - ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``:", "@attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property", "metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property): \"\"\" AWS Object Type =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html", "\"Permissions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\"", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html Property Document: - ``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution:", "class ComputeEnvironment(Resource): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document:", "metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) 
\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"},", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"},", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.VolumesHost\" Resource", "validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "AWS Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: - ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason\"\"\"", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict,", "validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property): \"\"\" AWS Object 
Type", "\"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode:", "class PropJobDefinitionEfsVolumeConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document:", "\"ComputeEnvironment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, )", "dict]] = attr.ib( default=None, converter=PropJobDefinitionResourceRequirement.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets:", "typing.Union['PropJobDefinitionEfsVolumeConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host:", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SpotIamFleetRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict =", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn\"\"\" p_LinuxParameters: typing.Union['PropJobDefinitionLinuxParameters', dict] = attr.ib( default=None, converter=PropJobDefinitionLinuxParameters.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLinuxParameters)), metadata={AttrMeta.PROPERTY_NAME: \"LinuxParameters\"}, )", "\"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\"", "typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"PlatformCapabilities\"}, ) 
\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "Property Document: - ``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Timeout\" p_AttemptDurationSeconds: int = attr.ib(", "- ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath - ``p_Permissions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Device\" p_ContainerPath: 
TypeHint.intrinsic_str =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"RootDirectory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption:", "\"EfsVolumeConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] = attr.ib( default=None, converter=PropJobDefinitionVolumesHost.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionVolumesHost)), metadata={AttrMeta.PROPERTY_NAME:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ImageId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"},", "- ``p_ResourceRequirements``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements - ``p_Secrets``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets - ``p_Ulimits``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits - ``p_User``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user -", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"HostPath\"}, ) \"\"\"Doc:", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Ec2KeyPair\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair\"\"\" p_ImageId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html Property Document: - ``p_ContainerPath``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-containerpath - ``p_ReadOnly``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly - ``p_SourceVolume``:", "= \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\"", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Type\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type\"\"\" p_ComputeEnvironmentName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: - ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action - ``p_OnExitCode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode - ``p_OnReason``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onreason", "@attr.s class 
PropSchedulingPolicyShareAttributes(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property", "import typing from ..core.model import ( Property, Resource, Tag, GetAtt, TypeHint, TypeCheck, )", "attr import typing from ..core.model import ( Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"FileSystemId\"},", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\" p_SpotIamFleetRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes:", 
"attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\"", "\"Devices\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, )", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\" p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),", "\"Tmpfs\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\"", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html Property Document: - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid - ``p_LaunchTemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\"", "dict]] = attr.ib( default=None, converter=PropJobDefinitionTmpfs.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionTmpfs), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Tmpfs\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s", "@attr.s class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property", "iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" rp_Priority: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME:", "Object Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit -", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus\"\"\" p_Ec2Configuration: typing.List[typing.Union['PropComputeEnvironmentEc2ConfigurationObject', dict]] = attr.ib( default=None, converter=PropComputeEnvironmentEc2ConfigurationObject.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropComputeEnvironmentEc2ConfigurationObject), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ec2Configuration\"}, )", "metadata={AttrMeta.PROPERTY_NAME: \"Command\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment),", "= attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Order\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order\"\"\" @attr.s class PropJobDefinitionSecret(Property): \"\"\"", "default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int = attr.ib( default=None,", "validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Priority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority\"\"\" p_JobQueueName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" rp_FileSystemId: TypeHint.intrinsic_str", "\"ShareDistribution\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int = attr.ib( default=None,", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness - ``p_Tmpfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs", "dict]] = attr.ib( default=None, 
converter=PropSchedulingPolicyShareAttributes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution\"\"\" @attr.s", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "\"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties', dict] = attr.ib( default=None,", "Resource declaration --- @attr.s class JobQueue(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue\" Resource", "Object Type = 
\"AWS::Batch::JobDefinition.Tmpfs\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html Property Document: - ``rp_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath -", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict]", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value\"\"\"", "\"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]] = attr.ib( default=None, 
converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, )", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AssignPublicIp\"}, ) \"\"\"Doc:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib(", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"},", "AWS_OBJECT_TYPE = 
\"AWS::Batch::JobDefinition.Device\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid\"\"\" p_InstanceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole\"\"\"", "= \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: - ``p_ShareIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier - ``p_WeightFactor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", 
"``p_Memory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource Document:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-name\"\"\" p_Value: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc:", "attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str = attr.ib(", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html Property Document: - ``p_Type``:", "``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"InitProcessEnabled\"}, ) \"\"\"Doc:", "p_Memory: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints',", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html Property Document: - ``rp_Image``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs\"\"\" @attr.s class PropJobDefinitionContainerProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ContainerProperties\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html", "= \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document: - ``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options", "= attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MainNode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode\"\"\" rp_NodeRangeProperties: typing.List[typing.Union['PropJobDefinitionNodeRangeProperty', dict]] =", "- ``rp_ComputeEnvironment``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment - ``rp_Order``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-order \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str =", "- ``p_ContainerProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties - ``p_JobDefinitionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname - ``p_NodeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties - ``p_Parameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters -", "typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"MountPoints\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints\"\"\"", "rp_TargetNodes: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"TargetNodes\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes\"\"\" p_Container: typing.Union['PropJobDefinitionContainerProperties',", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption\"\"\" p_TransitEncryptionPort:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Volumes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document:", "default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)),", "@attr.s class PropJobDefinitionSecret(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Secret\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html Property", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html Property Document: - ``p_PlatformVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-fargateplatformconfiguration.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration-platformversion \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str =", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property):", "p_UnmanagedvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict", 
"validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Devices\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices\"\"\" p_InitProcessEnabled: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)),", "\"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"}, )", "``p_PropagateTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags - ``p_RetryStrategy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy - ``p_SchedulingPriority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority - ``p_Timeout``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout - ``p_Tags``:", "- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type - ``p_ComputeEnvironmentName``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole -", "PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: -", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory - ``p_TransitEncryption``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryption - ``p_TransitEncryptionPort``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport \"\"\" AWS_OBJECT_TYPE =", "@property def rv_Arn(self) -> GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource):", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole\"\"\"", "default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-jobdefinitionname\"\"\" p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib( default=None,", "p_WeightFactor: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-weightfactor\"\"\" @attr.s class", 
"http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib( default=None, converter=PropJobDefinitionDevice.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionDevice),", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\"", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] = attr.ib(", "= \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: - ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadOnly\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-readonly\"\"\" p_SourceVolume: TypeHint.intrinsic_str = attr.ib( default=None,", "PropJobDefinitionEvaluateOnExit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.EvaluateOnExit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html Property Document: -", "validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: - ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.VolumesHost\" p_SourcePath: TypeHint.intrinsic_str =", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Vcpus\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-vcpus\"\"\" p_Volumes: typing.List[typing.Union['PropJobDefinitionVolumes', dict]] = attr.ib(", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-volumes\"\"\" @attr.s class PropJobDefinitionNodeRangeProperty(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeRangeProperty\" Resource", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname - ``p_ComputeResources``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources - ``p_ServiceRole``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-servicerole - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state - ``p_UnmanagedvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc:", "dict] = attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class", "- ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype -", "rp_Size: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str]", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str = attr.ib( default=None,", "attr.ib( default=None, converter=PropJobDefinitionEfsVolumeConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionEfsVolumeConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"EfsVolumeConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-efsvolumeconfiguration\"\"\" p_Host: typing.Union['PropJobDefinitionVolumesHost', dict] =", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str = 
attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\"", "\"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"},", "def rv_Arn(self) -> GetAtt: \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#aws-resource-batch-schedulingpolicy-return-values\"\"\" return GetAtt(resource=self, attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource): \"\"\"", "PropJobDefinitionUlimit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Ulimit\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: -", ") from ..core.constant import AttrMeta #--- Property declaration --- @attr.s class 
PropJobDefinitionAuthorizationConfig(Property): \"\"\"", "int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name: TypeHint.intrinsic_str =", "Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html Property Document: - ``p_Attempts``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts - ``p_EvaluateOnExit``:", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"LogDriver\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver\"\"\" p_Options: dict = attr.ib(", "Object Type = \"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-type -", "metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplatename\"\"\" p_Version: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"Version\"},", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-tags\"\"\" @attr.s class SchedulingPolicy(Resource): \"\"\" AWS Object Type = \"AWS::Batch::SchedulingPolicy\" Resource", "default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-action\"\"\" p_OnExitCode: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\"", "validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "\"AWS::Batch::JobDefinition.NodeProperties\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html Property Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties -", "#--- Property declaration --- @attr.s class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\"", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplateId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-launchtemplateid\"\"\" p_LaunchTemplateName: TypeHint.intrinsic_str = attr.ib(", "\"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeProperties\"", "metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object Type =", "\"SpotIamFleetRole\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, )", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: - ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig - ``p_RootDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory -", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentName\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict,", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumes.html#cfn-batch-jobdefinition-volumes-name\"\"\" @attr.s class PropSchedulingPolicyFairsharePolicy(Property): \"\"\" AWS Object Type =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s", "\"SourcePath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\"", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-containerpath\"\"\" rp_Size: int = attr.ib( default=None,", "\"Attempts\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib( default=None, converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit), iterable_validator=attr.validators.instance_of(list))),", "attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment\"\"\" p_ExecutionRoleArn: TypeHint.intrinsic_str =", "attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None,", "``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size - ``p_MountOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-mountoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Tmpfs\" rp_ContainerPath: TypeHint.intrinsic_str = attr.ib(", "= attr.ib( default=None, validator=attr.validators.instance_of(int), 
metadata={AttrMeta.PROPERTY_NAME: \"Size\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-tmpfs.html#cfn-batch-jobdefinition-tmpfs-size\"\"\" p_MountOptions: typing.List[TypeHint.intrinsic_str] = attr.ib(", "attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"Container\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property): \"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"WeightFactor\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-ulimits\"\"\" p_User: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"User\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-user\"\"\" p_Vcpus:", "- ``p_ShareDecaySeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds - 
``p_ShareDistribution``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedistribution \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" p_ComputeReservation: float =", "p_SecretOptions: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecretOptions\"}, ) \"\"\"Doc:", "int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"MaxvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-maxvcpus\"\"\" rp_Subnets: typing.List[TypeHint.intrinsic_str] =", "= \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes\"\"\" p_LaunchTemplate: typing.Union['PropComputeEnvironmentLaunchTemplateSpecification', dict] = attr.ib( default=None, converter=PropComputeEnvironmentLaunchTemplateSpecification.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentLaunchTemplateSpecification)), metadata={AttrMeta.PROPERTY_NAME: \"LaunchTemplate\"}, ) 
\"\"\"Doc:", "metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html Property Document: - ``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``:", "attr.ib( default=None, converter=PropSchedulingPolicyFairsharePolicy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropSchedulingPolicyFairsharePolicy)), metadata={AttrMeta.PROPERTY_NAME: \"FairsharePolicy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib(", "p_Permissions: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s", "p_Name: TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str,", "- ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids - ``p_SpotIamFleetRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-spotiamfleetrole - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\"", "- ``p_Tmpfs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-tmpfs \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LinuxParameters\" p_Devices: typing.List[typing.Union['PropJobDefinitionDevice', dict]] = attr.ib( default=None,", "metadata={AttrMeta.PROPERTY_NAME: \"Memory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory\"\"\" p_MountPoints: typing.List[typing.Union['PropJobDefinitionMountPoints', dict]] = attr.ib( default=None, converter=PropJobDefinitionMountPoints.from_list, 
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionMountPoints),", "\"AWS::Batch::JobQueue\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: - ``rp_ComputeEnvironmentOrder``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority -", "Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp:", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryptionPort\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-transitencryptionport\"\"\" @attr.s class PropJobDefinitionDevice(Property):", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate\"\"\" p_MinvCpus: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"MinvCpus\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup:", "- ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobQueue\" rp_ComputeEnvironmentOrder: typing.List[typing.Union['PropJobQueueComputeEnvironmentOrder', dict]]", "attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\" @attr.s class PropJobDefinitionFargatePlatformConfiguration(Property): \"\"\" AWS", "validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionResourceRequirement), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None,", "Document: - ``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPriority\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] = attr.ib(", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-sharedecayseconds\"\"\" p_ShareDistribution: typing.List[typing.Union['PropSchedulingPolicyShareAttributes', dict]] = attr.ib( default=None, converter=PropSchedulingPolicyShareAttributes.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSchedulingPolicyShareAttributes), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ShareDistribution\"},", "Property Document: - ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeRangeProperty\" rp_TargetNodes:", "= attr.ib( default=None, converter=PropJobDefinitionContainerProperties.from_dict, 
validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionContainerProperties)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.EvaluateOnExit\" rp_Action: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Action\"},", "metadata={AttrMeta.PROPERTY_NAME: \"ComputeReservation\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation\"\"\" p_ShareDecaySeconds: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME: \"ShareDecaySeconds\"},", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionFargatePlatformConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionFargatePlatformConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"FargatePlatformConfiguration\"}, )", "Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: - ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid - ``p_Iam``:", "``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-launchtemplatespecification.html#cfn-batch-computeenvironment-launchtemplatespecification-version \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.LaunchTemplateSpecification\" p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "class PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html Property Document:", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ExecutionRoleArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn\"\"\" p_FargatePlatformConfiguration: typing.Union['PropJobDefinitionFargatePlatformConfiguration', dict] =", "Property Document: - ``rp_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image - ``p_Command``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``:", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.AuthorizationConfig\" p_AccessPointId: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"},", "- ``p_SourceVolume``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-mountpoints.html#cfn-batch-jobdefinition-mountpoints-sourcevolume \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.MountPoints\" p_ContainerPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "@attr.s class PropJobDefinitionDevice(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-evaluateonexit\"\"\" @attr.s class PropJobDefinitionLinuxParameters(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LinuxParameters\" Resource Document:", "attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict, 
validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str = attr.ib(", "\"RootDirectory\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-rootdirectory\"\"\" p_TransitEncryption: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"TransitEncryption\"}, )", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-platformcapabilities\"\"\" p_PropagateTags: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class JobDefinition(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html", "import AttrMeta #--- Property declaration --- @attr.s class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object Type", "``p_DesiredvCpus``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-desiredvcpus - ``p_Ec2Configuration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2configuration - ``p_Ec2KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-ec2keypair - ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``:", "@attr.s class PropComputeEnvironmentComputeResources(Property): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment.ComputeResources\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html Property", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int =", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"AttemptDurationSeconds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds\"\"\" @attr.s class PropJobDefinitionTmpfs(Property): \"\"\" AWS", "TypeHint.intrinsic_str = attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property):", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"ResourceRequirements\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list,", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnReason\"}, ) \"\"\"Doc:", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container\"\"\" @attr.s class PropJobDefinitionNodeProperties(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.NodeProperties\" Resource", "coding: utf-8 -*- \"\"\" This module \"\"\" import attr import typing from ..core.model", "p_NodeProperties: typing.Union['PropJobDefinitionNodeProperties', dict] = attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\"", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource Document:", "\"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"HardLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit\"\"\" rp_Name:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state\"\"\" p_Tags: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\"", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder - ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-priority - ``p_JobQueueName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-jobqueuename - ``p_SchedulingPolicyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn - ``p_State``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-state", 
"default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SchedulingPolicyArn\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-schedulingpolicyarn\"\"\" p_State: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", "metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class PropJobDefinitionUlimit(Property): \"\"\" AWS Object Type =", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters\"\"\" p_LogConfiguration: typing.Union['PropJobDefinitionLogConfiguration', dict] = attr.ib( default=None, converter=PropJobDefinitionLogConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionLogConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"LogConfiguration\"}, )", "converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),", 
"validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-tags\"\"\" @property def rv_Arn(self) -> GetAtt: \"\"\"Doc:", "metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom\"\"\" @attr.s class PropJobDefinitionNetworkConfiguration(Property): \"\"\" AWS Object Type =", "\"ComputeResources\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeresources\"\"\" p_ServiceRole: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ServiceRole\"}, )", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class PropJobQueueComputeEnvironmentOrder(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" Resource", "``p_Command``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command - ``p_Environment``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-environment - ``p_ExecutionRoleArn``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-executionrolearn - ``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes\"\"\"", "\"ImageIdOverride\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride\"\"\" @attr.s class PropJobDefinitionVolumes(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Volumes\"", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html Property Document: - ``rp_TargetNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-targetnodes - ``p_Container``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-noderangeproperty.html#cfn-batch-jobdefinition-noderangeproperty-container \"\"\" AWS_OBJECT_TYPE =", "attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), 
metadata={AttrMeta.PROPERTY_NAME: \"Image\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-image\"\"\" p_Command: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None,", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"InstanceType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype\"\"\" p_JobRoleArn: TypeHint.intrinsic_str =", "- ``p_ImageId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-imageid - ``p_InstanceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancerole - ``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate -", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class 
PropJobDefinitionUlimit(Property):", "= attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Secrets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-secrets\"\"\" p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit',", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-name\"\"\" rp_ValueFrom: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ValueFrom\"}, ) \"\"\"Doc:", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"BidPercentage\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-bidpercentage\"\"\" p_DesiredvCpus: int = attr.ib( default=None,", "AWS Object Type = \"AWS::Batch::JobDefinition.Timeout\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html Property Document: - ``p_AttemptDurationSeconds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-timeout.html#cfn-batch-jobdefinition-timeout-attemptdurationseconds", "\"FairsharePolicy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy\"\"\" p_Name: TypeHint.intrinsic_str = attr.ib( 
default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, )", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int =", "``rp_HardLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-hardlimit - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name - ``rp_SoftLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Ulimit\" rp_HardLimit:", "``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.ComputeResources\" rp_MaxvCpus: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus\"\"\" p_PlacementGroup: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlacementGroup\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\"", "default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict = attr.ib( default=None,", "= attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-timeout\"\"\" p_Tags: dict =", "..core.constant import AttrMeta #--- Property declaration --- @attr.s class PropJobDefinitionAuthorizationConfig(Property): \"\"\" AWS Object", "metadata={AttrMeta.PROPERTY_NAME: \"ContainerProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-containerproperties\"\"\" p_JobDefinitionName: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"JobDefinitionName\"},", "p_SourcePath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"SourcePath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath\"\"\" @attr.s class", "attr.ib( default=None, 
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Value\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-environment.html#cfn-batch-jobdefinition-environment-value\"\"\" @attr.s class PropJobDefinitionVolumesHost(Property): \"\"\" AWS", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-tags\"\"\" @attr.s class PropJobDefinitionRetryStrategy(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.RetryStrategy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-command\"\"\" p_Environment: typing.List[typing.Union['PropJobDefinitionEnvironment', dict]] = attr.ib( default=None, converter=PropJobDefinitionEnvironment.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEnvironment), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Environment\"},", "p_Parameters: dict = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Parameters\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-parameters\"\"\" p_PlatformCapabilities: typing.List[TypeHint.intrinsic_str]", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ContainerPath\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath\"\"\" p_HostPath: TypeHint.intrinsic_str =", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"ReadonlyRootFilesystem\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem\"\"\" p_ResourceRequirements: typing.List[typing.Union['PropJobDefinitionResourceRequirement', dict]] =", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid\"\"\" p_AuthorizationConfig: typing.Union['PropJobDefinitionAuthorizationConfig', dict] = attr.ib( default=None, converter=PropJobDefinitionAuthorizationConfig.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc:", "- ``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - 
``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.LogConfiguration\" rp_LogDriver: TypeHint.intrinsic_str =", "dict] = attr.ib( default=None, converter=PropJobDefinitionNodeProperties.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNodeProperties)), metadata={AttrMeta.PROPERTY_NAME: \"NodeProperties\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-nodeproperties\"\"\" p_Parameters: dict", "- ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-privileged - ``p_ReadonlyRootFilesystem``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-readonlyrootfilesystem -", "typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-schedulingpriority\"\"\" p_Timeout: typing.Union['PropJobDefinitionTimeout', dict] = attr.ib( default=None, converter=PropJobDefinitionTimeout.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionTimeout)), metadata={AttrMeta.PROPERTY_NAME: \"Timeout\"},", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup\"\"\" p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"SecurityGroupIds\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids\"\"\"", "= \"AWS::Batch::JobDefinition.NetworkConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE =", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), 
metadata={AttrMeta.PROPERTY_NAME: \"AllocationStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-allocationstrategy\"\"\" p_BidPercentage: int = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-instancetypes - ``p_LaunchTemplate``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-launchtemplate - ``p_MinvCpus``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-minvcpus - ``p_PlacementGroup``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-placementgroup - ``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-securitygroupids", "``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NodeProperties\" rp_MainNode: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME:", "\"AWS::Batch::JobQueue.ComputeEnvironmentOrder\" rp_ComputeEnvironment: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironment\"}, ) \"\"\"Doc: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobqueue-computeenvironmentorder.html#cfn-batch-jobqueue-computeenvironmentorder-computeenvironment\"\"\" rp_Order:", "iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Permissions\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-permissions\"\"\" @attr.s class PropComputeEnvironmentEc2ConfigurationObject(Property): \"\"\" AWS Object Type", "Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html Property Document: - ``p_Devices``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap", "\"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: - ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid - ``p_AuthorizationConfig``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig -", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-name\"\"\" rp_SoftLimit: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"SoftLimit\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-ulimit.html#cfn-batch-jobdefinition-ulimit-softlimit\"\"\"", "\"NetworkConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"Privileged\"}, )", "AWS Object Type = \"AWS::Batch::ComputeEnvironment\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html Property Document: - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-type", "\"ResourceRequirements\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-resourcerequirements\"\"\" p_Secrets: typing.List[typing.Union['PropJobDefinitionSecret', dict]] = attr.ib( default=None, converter=PropJobDefinitionSecret.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionSecret), iterable_validator=attr.validators.instance_of(list))),", "\"\"\" 
AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.FargatePlatformConfiguration\" p_PlatformVersion: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"PlatformVersion\"}, )", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties\"\"\" rp_NumNodes: int = attr.ib( default=None, validator=attr.validators.instance_of(int), metadata={AttrMeta.PROPERTY_NAME: \"NumNodes\"}, ) \"\"\"Doc:", "\"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-computeenvironmentname\"\"\" p_ComputeResources: typing.Union['PropComputeEnvironmentComputeResources', dict] = attr.ib( default=None, converter=PropComputeEnvironmentComputeResources.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropComputeEnvironmentComputeResources)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeResources\"}, )", "TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Iam\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property):", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-schedulingpolicy.html#cfn-batch-schedulingpolicy-name\"\"\" p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib( default=None, validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), 
value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"},", "- ``p_FargatePlatformConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-fargateplatformconfiguration - ``p_InstanceType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-instancetype - ``p_JobRoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-jobrolearn - ``p_LinuxParameters``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-linuxparameters -", "Document: - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type: TypeHint.intrinsic_str", "attr.ib( default=None, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"Subnets\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-computeresources.html#cfn-batch-computeenvironment-computeresources-subnets\"\"\" rp_Type: TypeHint.intrinsic_str = attr.ib(", "AWS Object Type = 
\"AWS::Batch::JobDefinition.EfsVolumeConfiguration\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html Property Document: - ``rp_FileSystemId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-filesystemid", "validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionAuthorizationConfig)), metadata={AttrMeta.PROPERTY_NAME: \"AuthorizationConfig\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-efsvolumeconfiguration.html#cfn-batch-jobdefinition-efsvolumeconfiguration-authorizationconfig\"\"\" p_RootDirectory: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "``rp_ImageType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype - ``p_ImageIdOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imageidoverride \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::ComputeEnvironment.Ec2ConfigurationObject\" rp_ImageType: TypeHint.intrinsic_str = attr.ib(", "return GetAtt(resource=self, attr_name=\"Arn\") @attr.s class ComputeEnvironment(Resource): \"\"\" AWS Object Type = \"AWS::Batch::ComputeEnvironment\" Resource", "``p_LogConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-logconfiguration - ``p_Memory``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-memory - ``p_MountPoints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-mountpoints - ``p_NetworkConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration - ``p_Privileged``:", "\"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.RetryStrategy\" p_Attempts: int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, )", "AWS Object Type = \"AWS::Batch::SchedulingPolicy.ShareAttributes\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html Property Document: - ``p_ShareIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"ShareIdentifier\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-shareattributes.html#cfn-batch-schedulingpolicy-shareattributes-shareidentifier\"\"\" p_WeightFactor: float = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(float)), metadata={AttrMeta.PROPERTY_NAME:", "``p_Devices``: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-devices - ``p_InitProcessEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-initprocessenabled - ``p_MaxSwap``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-maxswap - ``p_SharedMemorySize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-sharedmemorysize - ``p_Swappiness``:", "\"OnStatusReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class PropJobDefinitionUlimit(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Ulimit\"", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"AccessPointId\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid\"\"\" p_Iam: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME:", "= \"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: - 
``p_ContainerPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-containerpath - ``p_HostPath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html#cfn-batch-jobdefinition-device-hostpath", "validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class JobDefinition(Resource): \"\"\" AWS Object Type", "int = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"UnmanagedvCpus\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-unmanagedvcpus\"\"\" p_Tags: dict =", "\"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.Device\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-device.html Property Document: - ``p_ContainerPath``:", "Object Type = \"AWS::Batch::JobDefinition.VolumesHost\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html Property Document: - ``p_SourcePath``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-volumeshost.html#cfn-batch-jobdefinition-volumeshost-sourcepath \"\"\"", "metadata={AttrMeta.PROPERTY_NAME: \"Attempts\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-retrystrategy.html#cfn-batch-jobdefinition-retrystrategy-attempts\"\"\" p_EvaluateOnExit: typing.List[typing.Union['PropJobDefinitionEvaluateOnExit', dict]] = attr.ib( default=None, 
converter=PropJobDefinitionEvaluateOnExit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionEvaluateOnExit),", "JobQueue(Resource): \"\"\" AWS Object Type = \"AWS::Batch::JobQueue\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html Property Document: -", "= attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(int)), metadata={AttrMeta.PROPERTY_NAME: \"Swappiness\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-linuxparameters.html#cfn-batch-jobdefinition-containerproperties-linuxparameters-swappiness\"\"\" p_Tmpfs: typing.List[typing.Union['PropJobDefinitionTmpfs', dict]] =", "Property Document: - ``rp_MainNode``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-mainnode - ``rp_NodeRangeProperties``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-noderangeproperties - ``rp_NumNodes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-nodeproperties.html#cfn-batch-jobdefinition-nodeproperties-numnodes \"\"\" AWS_OBJECT_TYPE", "validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnStatusReason\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onstatusreason\"\"\" @attr.s class PropJobDefinitionUlimit(Property): \"\"\" AWS Object Type", "Object Type = \"AWS::Batch::JobDefinition.AuthorizationConfig\" Resource Document: 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html Property Document: - ``p_AccessPointId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-accesspointid -", "= attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), metadata={AttrMeta.PROPERTY_NAME: \"ImageType\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-computeenvironment-ec2configurationobject.html#cfn-batch-computeenvironment-ec2configurationobject-imagetype\"\"\" p_ImageIdOverride: TypeHint.intrinsic_str = attr.ib(", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-authorizationconfig.html#cfn-batch-jobdefinition-authorizationconfig-iam\"\"\" @attr.s class PropJobDefinitionResourceRequirement(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.ResourceRequirement\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html", "Object Type = \"AWS::Batch::SchedulingPolicy.FairsharePolicy\" Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html Property Document: - ``p_ComputeReservation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-schedulingpolicy-fairsharepolicy.html#cfn-batch-schedulingpolicy-fairsharepolicy-computereservation -", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"State\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-computeenvironment.html#cfn-batch-computeenvironment-state\"\"\" p_UnmanagedvCpus: int 
= attr.ib( default=None,", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(dict)), metadata={AttrMeta.PROPERTY_NAME: \"Tags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-tags\"\"\" @attr.s class JobDefinition(Resource): \"\"\" AWS", "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html Property Document: - ``p_AssignPublicIp``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.NetworkConfiguration\" p_AssignPublicIp: TypeHint.intrinsic_str =", ") \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-networkconfiguration.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration-assignpublicip\"\"\" @attr.s class PropJobDefinitionLogConfiguration(Property): \"\"\" AWS Object Type = \"AWS::Batch::JobDefinition.LogConfiguration\" Resource", "Document: - ``rp_LogDriver``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-logdriver - ``p_Options``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-options - ``p_SecretOptions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties-logconfiguration.html#cfn-batch-jobdefinition-containerproperties-logconfiguration-secretoptions \"\"\" AWS_OBJECT_TYPE 
=", "AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Environment\" p_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"Name\"}, ) \"\"\"Doc:", "p_OnExitCode: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)), metadata={AttrMeta.PROPERTY_NAME: \"OnExitCode\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-evaluateonexit.html#cfn-batch-jobdefinition-evaluateonexit-onexitcode\"\"\" p_OnReason: TypeHint.intrinsic_str", "attr.ib( default=None, validator=attr.validators.optional(attr.validators.instance_of(bool)), metadata={AttrMeta.PROPERTY_NAME: \"PropagateTags\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-propagatetags\"\"\" p_RetryStrategy: typing.Union['PropJobDefinitionRetryStrategy', dict] = attr.ib(", "attr.ib( default=None, converter=PropJobDefinitionRetryStrategy.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionRetryStrategy)), metadata={AttrMeta.PROPERTY_NAME: \"RetryStrategy\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobdefinition.html#cfn-batch-jobdefinition-retrystrategy\"\"\" p_SchedulingPriority: int = attr.ib(", "default=None, converter=PropJobQueueComputeEnvironmentOrder.from_list, validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobQueueComputeEnvironmentOrder), iterable_validator=attr.validators.instance_of(list)), metadata={AttrMeta.PROPERTY_NAME: \"ComputeEnvironmentOrder\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-batch-jobqueue.html#cfn-batch-jobqueue-computeenvironmentorder\"\"\" 
rp_Priority: int = attr.ib(", "attr.ib( default=None, converter=PropJobDefinitionNetworkConfiguration.from_dict, validator=attr.validators.optional(attr.validators.instance_of(PropJobDefinitionNetworkConfiguration)), metadata={AttrMeta.PROPERTY_NAME: \"NetworkConfiguration\"}, ) \"\"\"Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-containerproperties.html#cfn-batch-jobdefinition-containerproperties-networkconfiguration\"\"\" p_Privileged: bool = attr.ib(", "Property Document: - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-type - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-resourcerequirement.html#cfn-batch-jobdefinition-resourcerequirement-value \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.ResourceRequirement\" p_Type:", "- ``rp_ValueFrom``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-batch-jobdefinition-secret.html#cfn-batch-jobdefinition-secret-valuefrom \"\"\" AWS_OBJECT_TYPE = \"AWS::Batch::JobDefinition.Secret\" rp_Name: TypeHint.intrinsic_str = attr.ib( default=None, validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),", "p_Ulimits: typing.List[typing.Union['PropJobDefinitionUlimit', dict]] = attr.ib( default=None, converter=PropJobDefinitionUlimit.from_list, validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropJobDefinitionUlimit), iterable_validator=attr.validators.instance_of(list))), metadata={AttrMeta.PROPERTY_NAME: \"Ulimits\"}, ) \"\"\"Doc:" ]
[ "monitor.utils.settings import EMAIL_PORT from monitor.utils.settings import EMAIL_USER from monitor.utils.settings import EMAIL_PASS from monitor.utils.settings", "subject msg['From'] = EMAIL_FROM_ADDR msg['To'] = ', '.join(to_list) smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER,", "smtplib from email.mime.text import MIMEText from monitor.utils.settings import EMAIL_SERVER from monitor.utils.settings import EMAIL_PORT", "# coding: utf-8 # __buildin__ modules import smtplib from email.mime.text import MIMEText from", "python # coding: utf-8 # __buildin__ modules import smtplib from email.mime.text import MIMEText", "__buildin__ modules import smtplib from email.mime.text import MIMEText from monitor.utils.settings import EMAIL_SERVER from", "smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit() def sendmail(subject, content): \"\"\"", "'plain', 'utf-8') msg['Subject'] = subject msg['From'] = EMAIL_FROM_ADDR msg['To'] = ', '.join(to_list) smtp", "EMAIL_FROM_ADDR msg['To'] = ', '.join(to_list) smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS)", "monitor.utils.settings import EMAIL_SERVER from monitor.utils.settings import EMAIL_PORT from monitor.utils.settings import EMAIL_USER from monitor.utils.settings", "# __buildin__ modules import smtplib from email.mime.text import MIMEText from monitor.utils.settings import EMAIL_SERVER", "msg['To'] = ', '.join(to_list) smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR,", "utf-8 # __buildin__ modules import smtplib from email.mime.text import MIMEText from monitor.utils.settings import", "to_addr[list]: subject[str]: content[str]: plain content \"\"\" msg = MIMEText(content, 'plain', 'utf-8') msg['Subject'] =", 
"def sendmail(subject, content): \"\"\" params: subject[str]: content[str]: plain content \"\"\" if EMAIL_LIST: _sendmail(EMAIL_LIST,", "to_list, msg.as_string()) smtp.quit() def sendmail(subject, content): \"\"\" params: subject[str]: content[str]: plain content \"\"\"", "= MIMEText(content, 'plain', 'utf-8') msg['Subject'] = subject msg['From'] = EMAIL_FROM_ADDR msg['To'] = ',", "import smtplib from email.mime.text import MIMEText from monitor.utils.settings import EMAIL_SERVER from monitor.utils.settings import", "from monitor.utils.settings import EMAIL_PASS from monitor.utils.settings import EMAIL_FROM_ADDR from monitor.utils.email_list import EMAIL_LIST def", "from monitor.utils.email_list import EMAIL_LIST def _sendmail(to_list, subject, content): \"\"\" params: to_addr[list]: subject[str]: content[str]:", "= subject msg['From'] = EMAIL_FROM_ADDR msg['To'] = ', '.join(to_list) smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0)", "msg['From'] = EMAIL_FROM_ADDR msg['To'] = ', '.join(to_list) smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT)", "sendmail(subject, content): \"\"\" params: subject[str]: content[str]: plain content \"\"\" if EMAIL_LIST: _sendmail(EMAIL_LIST, subject,", "\"\"\" msg = MIMEText(content, 'plain', 'utf-8') msg['Subject'] = subject msg['From'] = EMAIL_FROM_ADDR msg['To']", "monitor.utils.settings import EMAIL_FROM_ADDR from monitor.utils.email_list import EMAIL_LIST def _sendmail(to_list, subject, content): \"\"\" params:", "'.join(to_list) smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit()", "params: to_addr[list]: subject[str]: content[str]: plain content \"\"\" msg = MIMEText(content, 'plain', 'utf-8') msg['Subject']", "smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) 
smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit() def sendmail(subject, content):", "msg.as_string()) smtp.quit() def sendmail(subject, content): \"\"\" params: subject[str]: content[str]: plain content \"\"\" if", "\"\"\" params: to_addr[list]: subject[str]: content[str]: plain content \"\"\" msg = MIMEText(content, 'plain', 'utf-8')", "subject[str]: content[str]: plain content \"\"\" msg = MIMEText(content, 'plain', 'utf-8') msg['Subject'] = subject", "'utf-8') msg['Subject'] = subject msg['From'] = EMAIL_FROM_ADDR msg['To'] = ', '.join(to_list) smtp =", "smtp.quit() def sendmail(subject, content): \"\"\" params: subject[str]: content[str]: plain content \"\"\" if EMAIL_LIST:", "monitor.utils.settings import EMAIL_USER from monitor.utils.settings import EMAIL_PASS from monitor.utils.settings import EMAIL_FROM_ADDR from monitor.utils.email_list", "modules import smtplib from email.mime.text import MIMEText from monitor.utils.settings import EMAIL_SERVER from monitor.utils.settings", "EMAIL_PORT from monitor.utils.settings import EMAIL_USER from monitor.utils.settings import EMAIL_PASS from monitor.utils.settings import EMAIL_FROM_ADDR", "import EMAIL_LIST def _sendmail(to_list, subject, content): \"\"\" params: to_addr[list]: subject[str]: content[str]: plain content", "subject[str]: content[str]: plain content \"\"\" if EMAIL_LIST: _sendmail(EMAIL_LIST, subject, content) else: raise ValueError('email", "content \"\"\" if EMAIL_LIST: _sendmail(EMAIL_LIST, subject, content) else: raise ValueError('email list is empty')", "EMAIL_SERVER from monitor.utils.settings import EMAIL_PORT from monitor.utils.settings import EMAIL_USER from monitor.utils.settings import EMAIL_PASS", "coding: utf-8 # __buildin__ modules import smtplib from email.mime.text import MIMEText from monitor.utils.settings", "import EMAIL_PASS from monitor.utils.settings import EMAIL_FROM_ADDR from monitor.utils.email_list import EMAIL_LIST def _sendmail(to_list, subject,", "from 
monitor.utils.settings import EMAIL_FROM_ADDR from monitor.utils.email_list import EMAIL_LIST def _sendmail(to_list, subject, content): \"\"\"", "params: subject[str]: content[str]: plain content \"\"\" if EMAIL_LIST: _sendmail(EMAIL_LIST, subject, content) else: raise", "EMAIL_LIST def _sendmail(to_list, subject, content): \"\"\" params: to_addr[list]: subject[str]: content[str]: plain content \"\"\"", "subject, content): \"\"\" params: to_addr[list]: subject[str]: content[str]: plain content \"\"\" msg = MIMEText(content,", "smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit() def", "content[str]: plain content \"\"\" msg = MIMEText(content, 'plain', 'utf-8') msg['Subject'] = subject msg['From']", "import EMAIL_PORT from monitor.utils.settings import EMAIL_USER from monitor.utils.settings import EMAIL_PASS from monitor.utils.settings import", "email.mime.text import MIMEText from monitor.utils.settings import EMAIL_SERVER from monitor.utils.settings import EMAIL_PORT from monitor.utils.settings", "smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit() def sendmail(subject, content): \"\"\" params:", "EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit() def sendmail(subject, content): \"\"\" params: subject[str]: content[str]: plain", "content): \"\"\" params: to_addr[list]: subject[str]: content[str]: plain content \"\"\" msg = MIMEText(content, 'plain',", "plain content \"\"\" if EMAIL_LIST: _sendmail(EMAIL_LIST, subject, content) else: raise ValueError('email list is", "from monitor.utils.settings import EMAIL_USER from monitor.utils.settings import EMAIL_PASS from monitor.utils.settings import EMAIL_FROM_ADDR from", "def _sendmail(to_list, subject, content): \"\"\" params: to_addr[list]: subject[str]: 
content[str]: plain content \"\"\" msg", "EMAIL_FROM_ADDR from monitor.utils.email_list import EMAIL_LIST def _sendmail(to_list, subject, content): \"\"\" params: to_addr[list]: subject[str]:", "EMAIL_USER from monitor.utils.settings import EMAIL_PASS from monitor.utils.settings import EMAIL_FROM_ADDR from monitor.utils.email_list import EMAIL_LIST", "_sendmail(to_list, subject, content): \"\"\" params: to_addr[list]: subject[str]: content[str]: plain content \"\"\" msg =", "= EMAIL_FROM_ADDR msg['To'] = ', '.join(to_list) smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER,", "= ', '.join(to_list) smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list,", "EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit() def sendmail(subject, content): \"\"\" params: subject[str]:", "from monitor.utils.settings import EMAIL_PORT from monitor.utils.settings import EMAIL_USER from monitor.utils.settings import EMAIL_PASS from", "', '.join(to_list) smtp = smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string())", "\"\"\" params: subject[str]: content[str]: plain content \"\"\" if EMAIL_LIST: _sendmail(EMAIL_LIST, subject, content) else:", "plain content \"\"\" msg = MIMEText(content, 'plain', 'utf-8') msg['Subject'] = subject msg['From'] =", "from monitor.utils.settings import EMAIL_SERVER from monitor.utils.settings import EMAIL_PORT from monitor.utils.settings import EMAIL_USER from", "from email.mime.text import MIMEText from monitor.utils.settings import EMAIL_SERVER from monitor.utils.settings import EMAIL_PORT from", "content): \"\"\" params: subject[str]: content[str]: plain content \"\"\" if EMAIL_LIST: _sendmail(EMAIL_LIST, subject, 
content)", "smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit() def sendmail(subject, content): \"\"\" params: subject[str]: content[str]: plain content", "import MIMEText from monitor.utils.settings import EMAIL_SERVER from monitor.utils.settings import EMAIL_PORT from monitor.utils.settings import", "MIMEText(content, 'plain', 'utf-8') msg['Subject'] = subject msg['From'] = EMAIL_FROM_ADDR msg['To'] = ', '.join(to_list)", "MIMEText from monitor.utils.settings import EMAIL_SERVER from monitor.utils.settings import EMAIL_PORT from monitor.utils.settings import EMAIL_USER", "msg['Subject'] = subject msg['From'] = EMAIL_FROM_ADDR msg['To'] = ', '.join(to_list) smtp = smtplib.SMTP_SSL()", "monitor.utils.settings import EMAIL_PASS from monitor.utils.settings import EMAIL_FROM_ADDR from monitor.utils.email_list import EMAIL_LIST def _sendmail(to_list,", "import EMAIL_FROM_ADDR from monitor.utils.email_list import EMAIL_LIST def _sendmail(to_list, subject, content): \"\"\" params: to_addr[list]:", "= smtplib.SMTP_SSL() smtp.set_debuglevel(0) smtp.connect(EMAIL_SERVER, EMAIL_PORT) smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit() def sendmail(subject,", "EMAIL_PASS from monitor.utils.settings import EMAIL_FROM_ADDR from monitor.utils.email_list import EMAIL_LIST def _sendmail(to_list, subject, content):", "import EMAIL_SERVER from monitor.utils.settings import EMAIL_PORT from monitor.utils.settings import EMAIL_USER from monitor.utils.settings import", "content[str]: plain content \"\"\" if EMAIL_LIST: _sendmail(EMAIL_LIST, subject, content) else: raise ValueError('email list", "msg = MIMEText(content, 'plain', 'utf-8') msg['Subject'] = subject msg['From'] = EMAIL_FROM_ADDR msg['To'] =", "import EMAIL_USER from monitor.utils.settings import EMAIL_PASS from monitor.utils.settings import EMAIL_FROM_ADDR from monitor.utils.email_list import", "monitor.utils.email_list import EMAIL_LIST def 
_sendmail(to_list, subject, content): \"\"\" params: to_addr[list]: subject[str]: content[str]: plain", "smtp.login(EMAIL_USER, EMAIL_PASS) smtp.sendmail(EMAIL_FROM_ADDR, to_list, msg.as_string()) smtp.quit() def sendmail(subject, content): \"\"\" params: subject[str]: content[str]:", "#!/usr/bin/env python # coding: utf-8 # __buildin__ modules import smtplib from email.mime.text import", "content \"\"\" msg = MIMEText(content, 'plain', 'utf-8') msg['Subject'] = subject msg['From'] = EMAIL_FROM_ADDR" ]
[ "QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다.", "끊기 def get_code_list_by_market(self, market_code): ''' 종목코드 리스트 받기 #0:장내, 10:코스닥 :param market_code: 시장코드", "= 0 else: chegual_quantity = int(chegual_quantity) current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000", "chegual_quantity = int(chegual_quantity) current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000 current_price = abs(int(current_price))", "sRQName, sTrCode, msg)) # ui = Ui_class() class Jango(): def __init__(self, code): self.jango=dict()", ": +(-)2520 b = abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력", "deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"예수금\") self.deposit = int(deposit)", "sCode not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan})", "#예수금 요청 시그널 포함 self.detail_account_mystock() #계좌평가잔고내역 요청 시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에", "sRQName, i, \"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문상태\")", "screen_overwrite.append(code) #미체결에 있는 종목들 for code in self.not_concluded_account.keys(): code = self.not_concluded_account[code]['종목코드'] if code", "code not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": 
str(self.screen_meme_stock)}) cnt +=", "QString)\", sTrCode, sRQName, i, \"수익률(%)\") # 수익률 : -000000001.94 current_price = self.dynamicCall(\"GetCommData(QString, QString,", "self.realType.REALTYPE['주문체결']['주문수량']) # 출력 : 3 order_quan = int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) #", "# 출력 : +(-)2520 b = abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비'])", "0: temp_screen += 1 self.screen_real_stock = str(temp_screen) if (cnt % 50) == 0:", "\"r\"을 인자로 던져주면 파일 내용을 읽어 오겠다는 뜻이다. # lines = f.readlines() #파일에", "sRQName) for i in range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan", "\"0\") def setRealReg(self, companys): for code in companys: screen_num = self.not_concluded_account[code]['스크린번호'] fids =", "sRQName, i, \"보유수량\") # 보유수량 : 000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "0 not_chegual_quan = int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도, +매수 order_gubun", "000070 data=[int(current_price),int(volume), int(start_price), int(high_price), int(low_price)] ret_data.append(data) self.data = ret_data self.calculator_event_loop.exit() def multi_rq3(self, sCode,", "import sys from PyQt5.QAxContainer import * from PyQt5.QtCore import * from config.errorCode import", "for line in lines: #줄바꿈된 내용들이 한줄 씩 읽어와진다. 
# if line !=", "이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) # 실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한", "QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"실시간미체결요청\", \"opt10075\",", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분) ret = self.dynamicCall(\"CommRqData(QString, QString, int,", "# 현재 가지고 있는 대상인지 파악 if sCode in self.account_stock_dict.keys(): try: # 스탑로스", "files = os.listdir(\"./models/\") codes=list() for f in files: codes.append(f.replace(\".pt\",\"\")) for code in codes:", "QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self, companys): for code in companys: screen_num", "type_dict = 4 elif order_type ==\"매수정정\": type_dict = 5 elif order_type ==\"매도정정\": type_dict", "%s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName == \"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString, QString,", "f.close() files = os.listdir(\"./models/\") codes=list() for f in files: codes.append(f.replace(\".pt\",\"\")) for code in", "\"거래대금\") # 출력 : 000070 date = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip() current_price =", "+(-)2530 j = abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['저가']) # 출력 :", "사용할 비율 self.output_deposit = 0 #출력가능 금액 self.total_profit_loss_money = 0 #총평가손익금액 
self.total_profit_loss_rate =", "관련한 이벤트 def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") # 로그인 요청 시그널 self.login_event_loop.exec_() # 이벤트루프 실행", "= \"4000\" #계산용 스크린 번호 self.screen_real_stock = \"5000\" #종목별 할당할 스크린 번호 self.screen_meme_stock", "씩 읽어와진다. # if line != \"\": # ls = line.split(\"\\t\") # stock_code", "0115061 마지막 주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수, 확인, 체결 order_quan", "= {} ######################## ########################################## self.data = None ####### 요청 스크린 번호 self.screen_my_info =", "code in companys: screen_num = self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\",", "from PyQt5.QtCore import * from config.errorCode import * from PyQt5.QtTest import * from", "tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g}) # 현재 가지고 있는 대상인지 파악 if sCode in", "모두 읽어와 진다. 
# for line in lines: #줄바꿈된 내용들이 한줄 씩 읽어와진다.", "self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip() origin_order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 : defaluse :", "QString)\", sTrCode, sRQName, i, \"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "elif sRealType == \"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS", "d = float(d) e = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 : +(-)2520", "sRQName, i, \"고가\").strip() # 출력 : 000070 low_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "# ls = line.split(\"\\t\") # stock_code = ls[0] # stock_name = ls[1] #", "self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count =", "# 레지스트리에 저장된 api 모듈 불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인 관련 이벤트", "% account_num) def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num)", "# 로그인 요청 시그널 self.login_event_loop.exec_() # 이벤트루프 실행 def login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인", "return self.data def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호 연결 끊기 def get_code_list_by_market(self,", "= int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money", "QString, int, QString)\", sTrCode, sRQName, i, \"매입가\") # 매입가 : 000000000054100 learn_rate =", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price))", ": -000000001.94 current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\") #", "sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력 : +(-)2530 i = abs(int(i)) j = self.dynamicCall(\"GetCommRealData(QString, int)\",", "account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name", "QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString,", "# tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price})", "k = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, 
self.realType.REALTYPE[sRealType]['저가']) # 출력 : +(-)2530 k = abs(int(k))", "logging.debug(\"계좌번호 : %s\" % account_num) def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.dynamicCall(\"SetInputValue(QString,", "QString)\", sTrCode, sRQName, 0, \"출금가능금액\") self.output_deposit = int(output_deposit) logging.debug(\"예수금 : %s\" % self.output_deposit)", "int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력 : +(-)12.98 d = float(d) e = self.dynamicCall(\"GetCommRealData(QString,", "self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count", "order_price, hoga_dict, order_num] ) if order_success == 0: logging.debug(\"%s 전달 성공\"%order_type) print(\"%s 전달", "#계좌번호 가져오기 self.detail_account_info() #예수금 요청 시그널 포함 self.detail_account_mystock() #계좌평가잔고내역 요청 시그널 포함 QTimer.singleShot(5000,", "\"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"예수금\") self.deposit =", "sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString,", "\"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"체결량\") code =", "수정주가구분) ret = self.dynamicCall(\"CommRqData(QString, QString, int, QString, QString, QString)\",sRQName,trCode, \"0\", self.screen_meme_stock) # ret", "sTrCode, sRQName, i, \"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "sRQName == \"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"예수금\")", "QTest.qWait(5000) #실시간 수신 관련 함수 #장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", self.screen_start_stop_real,", "learn_rate = float(learn_rate.strip()) current_price = int(current_price.strip()) total_chegual_price = int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip()) tmp", "default: 0 not_chegual_quan = int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도, +매수", "stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan =", "del self.jango_dict[sCode] #송수신 메세지 get def msg_slot(self, sScrNo, sRQName, sTrCode, msg): logging.debug(\"스크린: %s,", "= \"5000\" #종목별 할당할 스크린 번호 self.screen_meme_stock = \"6000\" #종목별 할당할 주문용스크린 번호", "str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)})", "data=[int(current_price),int(volume), 
int(start_price), int(high_price), int(low_price)] ret_data.append(data) self.data = ret_data self.calculator_event_loop.exit() def multi_rq3(self, sCode, tick):", "\"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info)", "code = code.strip() code_nm = code_nm.strip() order_no = int(order_no.strip()) order_status = order_status.strip() order_quantity", "int, QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName,", "# 출력: -6000 current_price = abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010", "elif int(sGubun) == 1: #잔고 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:]", "sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\", \"1\")", "= int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 출력: 5 default : '' if", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력 : +240124 매수일때, -2034 매도일 때", "주식체결 정보 : \", self.not_concluded_account[sCode][\"종목명\"],a, b) pass except Exception as e: print(\"실시간 주식체결", "meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price =", "i, \"일자\") # 출력 : 000070 start_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "self.OnReceiveRealData.connect(self.realdata_slot) # 실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한 이벤트 def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\")", "# self.app = QApplication(sys.argv) # self.kiwoom = Kiwoom() # ret = self.kiwoom.multi_test() #", "int, QString)\", sTrCode, sRQName, i, \"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "+= 1 # 실시간 데이터 얻어오기 def realdata_slot(self, sCode, sRealType, sRealData): if sRealType", "stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price})", "import * # from config.slack import * import logging from PyQt5.QtWidgets import *", "int, QString, QString, QString)\",sRQName,trCode, \"0\", 
self.screen_meme_stock) # ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\")", "= float(learn_rate.strip()) current_price = int(current_price.strip()) total_chegual_price = int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip()) tmp =", "item in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif", "__init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\"", "tmp.update() logging.debug(\"미체결 종목 : %s \" % self.not_concluded_account[code]) print(\"미체결 종목 : %s \"", "####### 요청 스크린 번호 self.screen_my_info = \"2000\" #계좌 관련한 스크린 번호 self.screen_calculation_stock =", "self.output_deposit = int(output_deposit) logging.debug(\"예수금 : %s\" % self.output_deposit) print(\"예수금 : %s\" % self.output_deposit)", "abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력 : +(-)2530 i =", "= Ui_class() class Jango(): def __init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0", "''' 종목코드 리스트 받기 #0:장내, 10:코스닥 :param market_code: 시장코드 입력 :return: ''' code_list", "self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. 
# self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0", "int(start_price), int(high_price), int(low_price)] ret_data.append(data) self.data = ret_data self.calculator_event_loop.exit() def multi_rq3(self, sCode, tick): QTest.qWait(3600)", "= int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip()) if code", "int(self.screen_meme_stock) if (cnt % 50) == 0: temp_screen += 1 self.screen_real_stock = str(temp_screen)", "# 출력 : 000070 start_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "+매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028' chegual_price =", "print(\"EXception 현재 가지고 있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결 정보 : \",", "code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity})", "self.screen_number_setting() QTest.qWait(5000) #실시간 수신 관련 함수 #장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\",", "def get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호 반환", "QString, QString, QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self, companys): for code in", ": possible_quantity}) tmp.update() logging.debug(\"sPreNext : %s\" % sPrevNext) print(\"\\n계좌에 가지고 있는 종목은 %s", "할당 cnt = 0 for code in screen_overwrite: temp_screen = int(self.screen_real_stock) meme_screen =", "code in self.account_stock_dict: # dictionary 에 해당 종목이 있나 확인 pass else: self.account_stock_dict[code]", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력 : +(-)2530 j = abs(int(j)) k =", "# 출력 : +(-)2515 f = abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량'])", "정보 가져오기 self.portfolio_stock_dict = {} self.jango_dict = {} ######################## ########################################## self.data = None", "start.\") ####### event loop를 실행하기 위한 변수모음 self.login_event_loop = QEventLoop() #로그인 요청용 이벤트루프", "불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청 관련", "QString)\", sTrCode, sRQName, i, \"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결 종목 : %s \" % self.not_concluded_account[code]) print(\"미체결", "# 출력 : A039423 // 알파벳 A는 장내주식, J는 ELW종목, Q는 ETN종목 code", "sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun})", "self.dynamicCall(\"GetChejanData(int)\", 
self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010 first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) #", "총매입가 total_buy_price = int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price =", "= 0.0 #총수익률(%) ######################################## ######## 종목 정보 가져오기 self.portfolio_stock_dict = {} self.jango_dict =", "count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret =", "= 0 #예수금 self.use_money = 0 #실제 투자에 사용할 금액 self.use_money_percent = 0.5", "sTrCode, sRQName, i, \"시가\").strip() # 출력 : 000070 high_price = self.dynamicCall(\"GetCommData(QString, QString, int,", "sTrCode, sRQName, 0, \"총매입금액\") self.total_buy_money = int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list = code_list.split(';')[:-1] return code_list def read_code(self): # if", "시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에 미체결 종목들 가져오기 실행 ######################################### # QTest.qWait(10000)", "+= 1 self.screen_meme_stock = str(meme_screen) if code in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)})", "self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list = code_list.split(';')[:-1] return code_list def read_code(self): # if os.path.exists(\"files/condition_stock.txt\"): #", "이벤트 def signal_login_commConnect(self): 
self.dynamicCall(\"CommConnect()\") # 로그인 요청 시그널 self.login_event_loop.exec_() # 이벤트루프 실행 def", "self.account_num = account_num logging.debug(\"계좌번호 : %s\" % account_num) def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다", "# dictionary 에 해당 종목이 있나 확인 pass else: self.account_stock_dict[code] = Jango(code) code_nm", "tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량':", "range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목코드\") code_nm =", "= os.listdir(\"./models/\") codes=list() for f in files: codes.append(f.replace(\".pt\",\"\")) for code in codes: self.portfolio_stock_dict[code]", "elif hoga_type ==\"시장가\": hoga_dict = \"03\" order_success = self.dynamicCall( \"SendOrder(QString, QString, QString, int,", "QString, QString, int, QString, int, int, QString, QString)\", [order_type, self.screen_meme_stock, self.account_num, type_dict, sCode,", "출력: 5 default : '' if chegual_quantity == '': chegual_quantity = 0 else:", "self.OnEventConnect.connect(self.login_slot) # 로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def", "J는 ELW종목, Q는 ETN종목 code = code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "stock_quantity}) # tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\":", "cnt += 1 # 실시간 데이터 얻어오기 def realdata_slot(self, sCode, sRealType, sRealData): if", "abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력 : +(-)2520 c =", "in self.not_concluded_account: 
self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g}) # 현재 가지고", "싱글데이터 : %s - %s - %s\" % (total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows =", "‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’. ‘’], [‘’, ‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’, ‘시가’,", "QString, int, int, QString, QString)\", [order_type, self.screen_meme_stock, self.account_num, type_dict, sCode, order_quantity, order_price, hoga_dict,", "not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\":", "self.use_money = 0 #실제 투자에 사용할 금액 self.use_money_percent = 0.5 #예수금에서 실제 사용할", "for code in self.portfolio_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) # 스크린번호 할당", "= 0 else: chegual_price = int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 출력: 5", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['저가']) # 출력 : +(-)2530 k = abs(int(k)) if", "ELW종목, Q는 ETN종목 code = code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0:", "int(ls[2].split(\"\\n\")[0]) # stock_price = abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) # f.close() files = 
os.listdir(\"./models/\")", "tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\":", "이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한 이벤트 def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") # 로그인 요청", "int, QString)\", sTrCode, sRQName, i, \"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "logging from PyQt5.QtWidgets import * STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE = 0.03 # class", "수 있게 변환해 주는 함수 self.event_slots() # 키움과 연결하기 위한 시그널 / 슬롯", "= int(possible_quantity.strip()) tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) #", "code) QTest.qWait(5000) sys.exit() elif sRealType == \"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간'])", "dictionary 에 해당 종목이 있나 확인 pass else: self.account_stock_dict[code] = Jango(code) code_nm =", "get def msg_slot(self, sScrNo, sRQName, sTrCode, msg): logging.debug(\"스크린: %s, 요청이름: %s, tr코드: %s", "sRQName, 0, \"예수금\") self.deposit = int(deposit) use_money = float(self.deposit) * self.use_money_percent self.use_money =", "실행 def login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면 이벤트 루프를 종료한다. 
self.login_event_loop.exit() def", "config.errorCode import * from PyQt5.QtTest import * from config.kiwoomType import * # from", "######################## ########################################## self.data = None ####### 요청 스크린 번호 self.screen_my_info = \"2000\" #계좌", "2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감) value = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid) if value ==", "else: self.detail_account_info_event_loop.exit() elif sRQName == \"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for", "''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list = code_list.split(';')[:-1] return code_list def read_code(self): #", "가져오기 self.portfolio_stock_dict = {} self.jango_dict = {} ######################## ########################################## self.data = None #######", "주문이면 주문번호 할당 if sCode not in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\":", "출력: 21000 order_price = int(order_price) not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15, default:", "loop를 실행하기 위한 변수모음 self.login_event_loop = QEventLoop() #로그인 요청용 이벤트루프 self.detail_account_info_event_loop = QEventLoop()", "int, QString)\", sTrCode, sRQName, i, \"보유수량\") # 보유수량 : 000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString,", "self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit() elif sRealType == \"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "type_dict = 3 elif order_type ==\"매도취소\": type_dict = 4 elif order_type ==\"매수정정\": type_dict", "self.get_ocx_instance() #OCX 방식을 파이썬에 사용할 수 있게 변환해 주는 함수 self.event_slots() # 키움과", "이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): 
self.OnReceiveRealData.connect(self.realdata_slot) # 실시간", "cnt) ret_data=list() for i in range(cnt): data = [] code = self.dynamicCall(\"GetCommData(QString, QString,", "= Slack() #슬랙 동작 #print(\"kiwoom() class start. \") print(\"Kiwoom() class start.\") ####### event", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"저가\").strip() # 출력 : 000070", "QString, int, QString)\", sTrCode, sRQName, i, \"주문구분\") # -매도, +매수, -매도정정, +매수정정 not_quantity", "self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price})", "딜레이를 준다. account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호 반환 account_num = account_list.split(';')[1] self.account_num", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = abs(int(first_buy_price)) if", "ret = self.kiwoom.multi_test() # # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget): def __init__(self): super().__init__()", "가지고 있는 대상인지 파악 if sCode in self.account_stock_dict.keys(): try: # 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가'])", "order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수, 확인, 체결 order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량'])", "%s, 요청이름: %s, tr코드: %s --- %s\" %(sScrNo, sRQName, sTrCode, msg)) # ui", "possible_quantity = 
int(possible_quantity.strip()) tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity})", "code_nm = code_nm.strip() stock_quantity = int(stock_quantity.strip()) buy_price = int(buy_price.strip()) learn_rate = float(learn_rate.strip()) current_price", "int, QString)\", sTrCode, sRQName, i, \"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "= order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip()) if code in self.not_concluded_account: pass", "line != \"\": # ls = line.split(\"\\t\") # stock_code = ls[0] # stock_name", "= str(temp_screen) if (cnt % 50) == 0: meme_screen += 1 self.screen_meme_stock =", "total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString,", "int, QString)\", sTrCode, sRQName, i, \"현재가\") # 현재가 : 000000003450 total_chegual_price = self.dynamicCall(\"GetCommData(QString,", "= int(chegual_quantity) current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000 current_price = abs(int(current_price)) first_sell_price", ": %s\" % sPrevNext) print(\"\\n계좌에 가지고 있는 종목은 %s \" % rows) #", ">0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count", ": 000070 volume = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래량\").strip() #", "이벤트 루프를 종료한다. self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
account_list = self.dynamicCall(\"GetLoginInfo(QString)\",", "if value == '0': logging.debug(\"장 시작 전\") elif value == '3': logging.debug(\"장 시작\")", "주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수, 확인, 체결 order_quan = self.dynamicCall(\"GetChejanData(int)\",", "use_money = float(self.deposit) * self.use_money_percent self.use_money = int(use_money) self.use_money = self.use_money / 4", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입가\") # 매입가 : 000000000054100 learn_rate", "= abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 : 240124 h", "default : '' if chegual_quantity == '': chegual_quantity = 0 else: chegual_quantity =", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString, QString, int,", "i, \"보유수량\") # 보유수량 : 000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "f.readlines() #파일에 있는 내용들이 모두 읽어와 진다. 
# for line in lines: #줄바꿈된", "{} self.not_concluded_account = {} self.deposit = 0 #예수금 self.use_money = 0 #실제 투자에", "account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name", "start_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"시가\").strip() # 출력 :", "# 보유수량 : 000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) # 실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한 이벤트 def", "int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도, +매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name =", "sRQName) # print(sTrCode) # data = self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName) # [[‘’, ‘현재가’,", "\"주문상태\") # 접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문수량\")", "sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 : +(-)2520 e = abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString, int)\",", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110 default", "있는 대상인지 파악 if sCode in 
self.account_stock_dict.keys(): try: # 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if", "self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\") # print(self.jango_dict) if", "모듈 불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청", "= ret_data self.calculator_event_loop.exit() def multi_rq3(self, sCode, tick): QTest.qWait(3600) #3.6초마다 딜레이를 준다. trCode =", "전달 성공\"%order_type) else: logging.debug(\"%s 전달 실패\"%order_type) return order_success # 실시간 체결 정보 def", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price)) total_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는 종목의", "self.detail_account_info_event_loop.exit() elif sRQName == \"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i", "int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip()) if code in self.not_concluded_account: pass else: self.not_concluded_account[code] = Jango(code)", "종료, 동시호가로 넘어감\") elif value == \"4\": logging.debug(\"3시30분 장 종료\") for code in", "이벤트 시그널 / 슬롯 연결 self.signal_login_commConnect() #로그인 요청 시그널 포함 self.get_account_info() #계좌번호 가져오기", "있는 종목들 for code in self.not_concluded_account.keys(): code = self.not_concluded_account[code]['종목코드'] if code not in", "stock_name = stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\",", "!= \"\": # ls = line.split(\"\\t\") # stock_code = 
ls[0] # stock_name =", "def multi_rq3(self, sCode, tick): QTest.qWait(3600) #3.6초마다 딜레이를 준다. trCode = \"opt10080\" sRQName =", "first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000 first_buy_price = abs(int(first_buy_price))", "abs(int(k)) if sCode not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\":", "in self.account_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) #미체결에 있는 종목들 for code", "order_price = int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip()) if", "########################################## self.data = None ####### 요청 스크린 번호 self.screen_my_info = \"2000\" #계좌 관련한", "출력 : +(-)2530 k = abs(int(k)) if sCode not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c", "QString)\", sTrCode, sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString, int,", "code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") order_no = self.dynamicCall(\"GetCommData(QString,", "order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 출럭: 0115061 마지막 주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태'])", "(0:장시작전, 2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감) value = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid) if value", "print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun) == 1: #잔고 account_num = self.dynamicCall(\"GetChejanData(int)\", 
self.realType.REALTYPE['잔고']['계좌번호']) sCode =", "self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\":", "sTrCode, sRQName, i, \"주문상태\") # 접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "sTrCode, sRecordName, sPrevNext): # print(\"sRQName\", sRQName) if sRQName == \"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString,", "import * from PyQt5.QtTest import * from config.kiwoomType import * # from config.slack", "# stock_price = abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) # f.close() files = os.listdir(\"./models/\") codes=list()", "= stock_name.strip() origin_order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 : defaluse : \"000000\" order_number", ": 000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입가\") #", "self.total_buy_money = int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총평가손익금액\")", "not in screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들 for code in self.portfolio_stock_dict.keys(): if code", "= \"00\" elif hoga_type ==\"시장가\": hoga_dict = \"03\" order_success = self.dynamicCall( \"SendOrder(QString, QString,", "= float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 : %s - %s - %s\" % (total_buy_money, total_profit_loss_money,", "buy_price, learn_rate, current_price)) if code in self.account_stock_dict: # dictionary 에 해당 종목이 있나", "종목 : %s \" % 
self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() ####################################### elif sRQName == \"3분봉조회\": cnt", "sTrCode, sRQName, i, \"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "==\"매도취소\": type_dict = 4 elif order_type ==\"매수정정\": type_dict = 5 elif order_type ==\"매도정정\":", "HHMMSS b = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['현재가']) # 출력 : +(-)2520 b =", "def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된 api 모듈 불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot) #", "e: print(\"실시간 주식체결 정보 : \", sCode,a, b) def send_order(self,order_type, sCode, order_quantity, order_price,", "\"4000\" #계산용 스크린 번호 self.screen_real_stock = \"5000\" #종목별 할당할 스크린 번호 self.screen_meme_stock =", "self.screen_meme_stock = str(meme_screen) if code in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code", "self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() ####################################### elif sRQName == \"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName)", "‘날짜’, ‘시가’, ‘고가’, ‘저가’, ‘’]. 
[…]] logging.debug(\"3분봉조회 %s\" % cnt) ret_data=list() for i", "마지막 주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수, 확인, 체결 order_quan =", "\"수정주가구분\", 수정주가구분) ret = self.dynamicCall(\"CommRqData(QString, QString, int, QString, QString, QString)\",sRQName,trCode, \"0\", self.screen_meme_stock) #", "= int(stock_quantity.strip()) buy_price = int(buy_price.strip()) learn_rate = float(learn_rate.strip()) current_price = int(current_price.strip()) total_chegual_price =", "in self.not_concluded_account: pass else: self.not_concluded_account[code] = Jango(code) tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명':", "temp_screen += 1 self.screen_real_stock = str(temp_screen) if (cnt % 50) == 0: meme_screen", "int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000 order_price = int(order_price) not_chegual_quan =", "#3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\",", "k = abs(int(k)) if sCode not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\":", "pickle import sys from PyQt5.QAxContainer import * from PyQt5.QtCore import * from config.errorCode", "self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", screen_num, code, fids, \"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") #", "self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self, companys): for code", "\"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString,", "-6010 first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000 first_buy_price =", "- 종목명: %s - 보유수량: %s - 매입가:%s - 수익률: %s - 현재가:", "-= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count = self.account_stock_dict[sCode].jango[\"체결량\"] while", "#손익률 if self.jango[\"체결가\"] != 0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액 self.jango[\"보유금액\"]=self.jango[\"체결가\"]*self.jango[\"체결량\"] #내용 확인해 보자.", "sRQName, i, \"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\")", "sRQName, 0, \"종목명\") code_name = code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문상태\") # 접수,확인,체결 order_quantity =", "# 스크린번호 할당 cnt = 0 for code in screen_overwrite: temp_screen = int(self.screen_real_stock)", "# 실시간 이벤트 시그널 / 슬롯 연결 self.signal_login_commConnect() #로그인 요청 시그널 포함 self.get_account_info()", "‘저가’. ‘’], [‘’, ‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’, ‘’]. 
[…]] logging.debug(\"3분봉조회", "#잔고 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명'])", "for code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit() elif sRealType ==", "[] code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목코드\") code =", "sRQName, 0, \"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 : %s - %s -", "tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\": buy_price})", "금액 self.use_money_percent = 0.5 #예수금에서 실제 사용할 비율 self.output_deposit = 0 #출력가능 금액", "logging.debug(\"장 시작\") elif value == \"2\": logging.debug(\"장 종료, 동시호가로 넘어감\") elif value ==", "order_success # 실시간 체결 정보 def chejan_slot(self, sGubun, nItemCnt, sFidList): if int(sGubun) ==", "tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분':", "self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\":", "print(\"실시간 주식체결 정보 : \", sCode,a, b) def send_order(self,order_type, sCode, order_quantity, order_price, hoga_type,", "date = 
self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"일자\") # 출력 :", "트랜잭션 요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) # 실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot)", "realdata_slot(self, sCode, sRealType, sRealData): if sRealType == \"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전,", "= stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량'])", "err_code): logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면 이벤트 루프를 종료한다. self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600) #3.6초마다", "self.get_not_concluded_account) #5초 뒤에 미체결 종목들 가져오기 실행 ######################################### # QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000)", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"예수금\") self.deposit = int(deposit) use_money", "== 0: #주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name =", "in self.account_stock_dict: # dictionary 에 해당 종목이 있나 확인 pass else: self.account_stock_dict[code] =", "self.dynamicCall(\"CommRqData(QString, QString, int, QString, QString, QString)\",sRQName,trCode, \"0\", self.screen_meme_stock) # ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\",", "self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", screen_num, code, fids, \"1\") def", "os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에 파일이 있는지 체크한다. 
# f = open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\")", "int(output_deposit) logging.debug(\"예수금 : %s\" % self.output_deposit) print(\"예수금 : %s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit()", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"저가\").strip() # 출력 : 000070 data=[int(current_price),int(volume),", "0: logging.debug(\"%s 전달 성공\"%order_type) print(\"%s 전달 성공\"%order_type) else: logging.debug(\"%s 전달 실패\"%order_type) return order_success", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString,", "sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력 : +240124 매수일때, -2034 매도일 때 g = abs(int(g))", "self.login_event_loop.exec_() # 이벤트루프 실행 def login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면 이벤트 루프를", "else: chegual_price = int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 출력: 5 default :", "order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결 종목 :", "= self.kiwoom.multi_test() # # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget): def __init__(self): super().__init__() self.realType", ": 000070 trading_value = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래대금\") #", "# if os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에 파일이 있는지 체크한다. 
# f = open(\"files/condition_stock.txt\",", "print(\"\\n계좌에 가지고 있는 종목은 %s \" % rows) # for item in self.account_stock_dict.keys():", "i, \"주문상태\") # 접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS b = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['현재가']) # 출력 :", "출력 : +(-)2520 e = abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) #", "= abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력 : +(-)2530 i", "익절 count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret", "Exception as e: print(\"실시간 주식체결 정보 : \", sCode,a, b) def send_order(self,order_type, sCode,", "QString, QString)\",sRQName,trCode, \"0\", self.screen_meme_stock) # ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\") self.calculator_event_loop.exec_() return", "sTrCode, sRQName, i, \"고가\").strip() # 출력 : 000070 low_price = self.dynamicCall(\"GetCommData(QString, QString, int,", "not in screen_overwrite: screen_overwrite.append(code) #미체결에 있는 종목들 for code in self.not_concluded_account.keys(): code =", "i, \"수익률(%)\") # 수익률 : -000000001.94 current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "elif order_type ==\"매도취소\": type_dict = 4 elif order_type ==\"매수정정\": type_dict = 5 elif", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110 default : '' if chegual_price == '': chegual_price", "= 1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick) 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\",", "= 0 #총평가손익금액 self.total_profit_loss_rate = 0.0 #총수익률(%) ######################################## ######## 종목 정보 가져오기 self.portfolio_stock_dict", "logging.debug(\"%s 전달 실패\"%order_type) return order_success # 실시간 체결 정보 def chejan_slot(self, sGubun, nItemCnt,", "import * from config.errorCode import * from PyQt5.QtTest import * from config.kiwoomType import", "defaluse : \"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 출럭: 0115061 마지막 주문번호 order_status", "read_code(self): # if os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에 파일이 있는지 체크한다. # f =", "sRQName, i, \"종목번호\") # 출력 : A039423 // 알파벳 A는 장내주식, J는 ELW종목,", "# print(self.jango_dict) if stock_quan == 0: del self.jango_dict[sCode] #송수신 메세지 get def msg_slot(self,", "(cnt % 50) == 0: meme_screen += 1 self.screen_meme_stock = str(meme_screen) if code", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15, default: 0 not_chegual_quan = int(not_chegual_quan) order_gubun =", "= QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop() ######################################### ####### 계좌 관련된 변수 self.account_stock_dict = {}", "= self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g}) # 현재 가지고 있는 대상인지 파악 if", "= int(current_price.strip()) total_chegual_price = int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip()) tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm})", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"일자\") # 출력 : 000070", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 출럭: 0115061 마지막 주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력:", "self.dynamicCall(\"GetChejanData(int)\", 
self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price)) first_buy_price", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000 current_price = abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가'])", "+(-)2520 e = abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 :", "code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목번호\") # 출력 :", "abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 : +(-)2515 f =", "order_price = int(order_price) not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15, default: 0 not_chegual_quan", "as e: print(\"실시간 주식체결 정보 : \", sCode,a, b) def send_order(self,order_type, sCode, order_quantity,", "#슬랙 동작 #print(\"kiwoom() class start. 
\") print(\"Kiwoom() class start.\") ####### event loop를 실행하기", "int(ok_quantity.strip()) if code in self.not_concluded_account: pass else: self.not_concluded_account[code] = Jango(code) tmp = self.not_concluded_account[code]", "self.dynamicCall( \"SendOrder(QString, QString, QString, int, QString, int, int, QString, QString)\", [order_type, self.screen_meme_stock, self.account_num,", "sTrCode, sRQName, i, \"매매가능수량\") logging.debug(\"종목코드: %s - 종목명: %s - 보유수량: %s -", "order_no = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString,", "# # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget): def __init__(self): super().__init__() self.realType = RealType()", "code_nm.strip() stock_quantity = int(stock_quantity.strip()) buy_price = int(buy_price.strip()) learn_rate = float(learn_rate.strip()) current_price = int(current_price.strip())", "stock_name.strip() origin_order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 : defaluse : \"000000\" order_number =", "order_num=\"\"): if order_type == \"신규매수\": type_dict = 1 elif order_type ==\"신규매도\": type_dict =", "multi_rq3(self, sCode, tick): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
trCode = \"opt10080\" sRQName = \"3분봉조회\"", "fid = self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감) value = self.dynamicCall(\"GetCommRealData(QString, int)\",", "self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i in range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int,", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"출금가능금액\") self.output_deposit = int(output_deposit) logging.debug(\"예수금", "QString)\", sTrCode, sRQName, i, \"매매가능수량\") logging.debug(\"종목코드: %s - 종목명: %s - 보유수량: %s", "if order_success == 0: logging.debug(\"%s 전달 성공\"%order_type) print(\"%s 전달 성공\"%order_type) else: logging.debug(\"%s 전달", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = abs(int(first_buy_price)) if sCode not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price})", "\"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문상태\") # 접수,확인,체결", "= str(meme_screen) if code in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code not", "= int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price)) total_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가'])", "logging.debug(\"3시30분 장 종료\") for code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit()", "\", self.not_concluded_account[sCode][\"종목명\"],a, b) pass except Exception as e: print(\"실시간 주식체결 정보 : \",", "int, QString)\", sTrCode, 
sRQName, i, \"거래대금\") # 출력 : 000070 date = self.dynamicCall(\"GetCommData(QString,", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분) ret = self.dynamicCall(\"CommRqData(QString, QString, int, QString, QString, QString)\",sRQName,trCode, \"0\",", "= self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list = code_list.split(';')[:-1] return code_list def read_code(self): # if os.path.exists(\"files/condition_stock.txt\"):", "self.screen_meme_stock = \"6000\" #종목별 할당할 주문용스크린 번호 self.screen_start_stop_real = \"1000\" #장 시작/종료 실시간", "self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit() elif sRealType == \"주식체결\": a =", "#미체결에 있는 종목들 for code in self.not_concluded_account.keys(): code = self.not_concluded_account[code]['종목코드'] if code not", "'0': logging.debug(\"장 시작 전\") elif value == '3': logging.debug(\"장 시작\") elif value ==", "확인, 체결 order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력 : 3 order_quan = int(order_quan)", "사용할 수 있게 변환해 주는 함수 self.event_slots() # 키움과 연결하기 위한 시그널 /", "in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\":", "hoga_type, order_num=\"\"): if order_type == \"신규매수\": type_dict = 1 elif order_type ==\"신규매도\": type_dict", "self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) # print(sTrCode) # data = self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName)", "종목 : %s \" % self.not_concluded_account[code]) print(\"미체결 종목 : %s \" % self.not_concluded_account[code].jango)", "요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) 
# 실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목", "self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. # self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self): #손익률", "인자로 던져주면 파일 내용을 읽어 오겠다는 뜻이다. # lines = f.readlines() #파일에 있는", "abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 : 240124 h =", "5 default : '' if chegual_quantity == '': chegual_quantity = 0 else: chegual_quantity", "self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0.", "total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate)", "3 order_quan = int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000 order_price =", "chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력:", "buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량' : possible_quantity})", "QString)\", sTrCode, sRQName, 0, \"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 : %s -", "#계좌평가잔고내역 요청 시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에 미체결 종목들 가져오기 실행 #########################################", 
"stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price =", "\"5000\" #종목별 할당할 스크린 번호 self.screen_meme_stock = \"6000\" #종목별 할당할 주문용스크린 번호 self.screen_start_stop_real", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력 : +(-)12.98 d = float(d) e", "'3': logging.debug(\"장 시작\") elif value == \"2\": logging.debug(\"장 종료, 동시호가로 넘어감\") elif value", "if stock_quan == 0: del self.jango_dict[sCode] #송수신 메세지 get def msg_slot(self, sScrNo, sRQName,", "= abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010 first_sell_price = abs(int(first_sell_price)) first_buy_price", "-매도, +매수, -매도정정, +매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun) == 1:", "int, QString, int, int, QString, QString)\", [order_type, self.screen_meme_stock, self.account_num, type_dict, sCode, order_quantity, order_price,", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"실시간미체결요청\",", "sFidList): if int(sGubun) == 0: #주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\",", "not_chegual_quan = int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도, +매수 order_gubun =", "= abs(int(i)) j = 
self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력 : +(-)2530 j", "c = abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력 : +(-)12.98", "possible_quantity}) tmp.update() logging.debug(\"sPreNext : %s\" % sPrevNext) print(\"\\n계좌에 가지고 있는 종목은 %s \"", "possible_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매매가능수량\") logging.debug(\"종목코드: %s -", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"시가\").strip() # 출력 : 000070 high_price", "from PyQt5.QAxContainer import * from PyQt5.QtCore import * from config.errorCode import * from", "event loop를 실행하기 위한 변수모음 self.login_event_loop = QEventLoop() #로그인 요청용 이벤트루프 self.detail_account_info_event_loop =", "if order_type == \"신규매수\": type_dict = 1 elif order_type ==\"신규매도\": type_dict = 2", "‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’. 
‘’], [‘’, ‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’,", "# (0:장시작전, 2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감) value = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid) if", "self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price)) first_buy_price =", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString, QString, int,", "int(order_quantity.strip()) order_price = int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip())", "self.not_concluded_account[code]) print(\"미체결 종목 : %s \" % self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() ####################################### elif sRQName ==", "QEventLoop() #로그인 요청용 이벤트루프 self.detail_account_info_event_loop = QEventLoop() # 예수금 요청용 이벤트루프 self.calculator_event_loop =", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 : defaluse : \"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호'])", "self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\") # print(self.jango_dict) if stock_quan == 0: del self.jango_dict[sCode] #송수신", "PyQt5.QtWidgets import * STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE = 0.03 # class Ui_class(): #", "%s\" % cnt) ret_data=list() for i in range(cnt): data = [] code =", "start. \") print(\"Kiwoom() class start.\") ####### event loop를 실행하기 위한 변수모음 self.login_event_loop =", "# lines = f.readlines() #파일에 있는 내용들이 모두 읽어와 진다. 
# for line", "order_type ==\"매수취소\": type_dict = 3 elif order_type ==\"매도취소\": type_dict = 4 elif order_type", "= QEventLoop() # 예수금 요청용 이벤트루프 self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop() #########################################", "21000 order_price = int(order_price) not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15, default: 0", "######################################## ######## 종목 정보 가져오기 self.portfolio_stock_dict = {} self.jango_dict = {} ######################## ##########################################", "관련 함수 #장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\")", "sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip() origin_order_number =", "self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan) like_quan =", "0 #출력가능 금액 self.total_profit_loss_money = 0 #총평가손익금액 self.total_profit_loss_rate = 0.0 #총수익률(%) ######################################## ########", "QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext):", "self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) # 실시간 이벤트", "self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. 
# self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self): #손익률 if self.jango[\"체결가\"] != 0: self.jango[\"손익률\"]", "000070 volume = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래량\").strip() # 출력", "self.screen_meme_stock, self.account_num, type_dict, sCode, order_quantity, order_price, hoga_dict, order_num] ) if order_success == 0:", "not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g}) # 현재", "sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "\"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self,", "받기 #0:장내, 10:코스닥 :param market_code: 시장코드 입력 :return: ''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code)", "self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\":", "QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def", "관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) #", "self.detail_account_info_event_loop.exec_() def detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString, QString)\",", "def update(self): #손익률 if self.jango[\"체결가\"] != 0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액 self.jango[\"보유금액\"]=self.jango[\"체결가\"]*self.jango[\"체결량\"] #내용", "value == '0': logging.debug(\"장 시작 전\") elif value == '3': logging.debug(\"장 시작\") elif", "i, \"매입가\") # 매입가 : 000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "int(sGubun) == 1: #잔고 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name", "= int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가'])", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan)", "ok_quantity = int(ok_quantity.strip()) if code in self.not_concluded_account: pass else: self.not_concluded_account[code] = Jango(code) tmp", ": defaluse : \"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 출럭: 0115061 마지막 주문번호", "QString, int, QString)\", sTrCode, sRQName, i, \"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "get_code_list_by_market(self, market_code): ''' 종목코드 리스트 받기 #0:장내, 10:코스닥 :param market_code: 시장코드 입력 :return:", "print(\"잔고\") # print(self.jango_dict) if stock_quan == 0: del self.jango_dict[sCode] #송수신 메세지 get def", "sTrCode, sRQName) for i in range(rows): code = 
self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "######################################### ####### 계좌 관련된 변수 self.account_stock_dict = {} self.not_concluded_account = {} self.deposit =", "\"opt10080\" sRQName = \"3분봉조회\" 수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\",", ": +(-)2520 c = abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력", "str(temp_screen) if (cnt % 50) == 0: meme_screen += 1 self.screen_meme_stock = str(meme_screen)", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString, QString,", "# 출력 : 240124 h = abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가'])", ": 000070 start_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"시가\").strip() #", "-= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as e: print(e) print(\"EXception 현재 가지고 있는 잔고", "if int(sGubun) == 0: #주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:]", "셋팅 함수들 바로 실행 self.get_ocx_instance() #OCX 방식을 파이썬에 사용할 수 있게 변환해 주는", "\"3분봉조회\" 수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString,", "‘저가’, ‘’]. 
[…]] logging.debug(\"3분봉조회 %s\" % cnt) ret_data=list() for i in range(cnt): data", "logging.debug(\"종목코드: %s - 종목명: %s - 보유수량: %s - 매입가:%s - 수익률: %s", "= order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가'])", "\"현재가\") # 현재가 : 000000003450 total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "= 0.5 #예수금에서 실제 사용할 비율 self.output_deposit = 0 #출력가능 금액 self.total_profit_loss_money =", "i in range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목번호\")", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력 : +(-)2530 j = abs(int(j)) k", "= f.readlines() #파일에 있는 내용들이 모두 읽어와 진다. # for line in lines:", "스크린 번호 self.screen_calculation_stock = \"4000\" #계산용 스크린 번호 self.screen_real_stock = \"5000\" #종목별 할당할", "else: chegual_quantity = int(chegual_quantity) current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000 current_price =", "from PyQt5.QtWidgets import * STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE = 0.03 # class Ui_class():", "QString)\", sTrCode, sRQName, i, \"매입가\") # 매입가 : 000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString, QString,", "1 elif order_type ==\"신규매도\": type_dict = 2 elif order_type ==\"매수취소\": type_dict = 3", "# 출력: 접수, 확인, 체결 order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력 : 3", "= 0.03 # class Ui_class(): # def __init__(self): # self.app = QApplication(sys.argv) #", "= \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 
self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\"", "% cnt) ret_data=list() for i in range(cnt): data = [] code = self.dynamicCall(\"GetCommData(QString,", "QString, QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self, companys): for code in companys:", "__init__(self): super().__init__() self.realType = RealType() # self.slack = Slack() #슬랙 동작 #print(\"kiwoom() class", "sRealType == \"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감) value", "QString)\", sTrCode, sRQName, i, \"주문상태\") # 접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "sRQName, 0, \"종목코드\") code = code.strip() code_name = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "* from PyQt5.QtTest import * from config.kiwoomType import * # from config.slack import", "self.portfolio_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) # 스크린번호 할당 cnt = 0", "int, QString)\", sTrCode, sRQName, i, \"고가\").strip() # 출력 : 000070 low_price = self.dynamicCall(\"GetCommData(QString,", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip() origin_order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호'])", "except Exception as e: print(\"실시간 주식체결 정보 : \", sCode,a, b) def send_order(self,order_type,", "있는지 체크한다. 
# f = open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") # \"r\"을 인자로 던져주면 파일", "# QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간 수신 관련 함수 #장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString,", "요청이름: %s, tr코드: %s --- %s\" %(sScrNo, sRQName, sTrCode, msg)) # ui =", "abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['저가']) # 출력 : +(-)2530 k =", "출력 : 000070 start_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"시가\").strip()", "i, \"현재가\").strip() # 출력 : 000070 volume = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "리스트 받기 #0:장내, 10:코스닥 :param market_code: 시장코드 입력 :return: ''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\",", "#주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명'])", "QString, int, QString)\", sTrCode, sRQName, i, \"거래량\").strip() # 출력 : 000070 trading_value =", "QString)\", sTrCode, sRQName, i, \"현재가\").strip() # 출력 : 000070 volume = self.dynamicCall(\"GetCommData(QString, QString,", "슬롯 모음 self.real_event_slot() # 실시간 이벤트 시그널 / 슬롯 연결 self.signal_login_commConnect() #로그인 요청", "int)\", sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력 : +(-)2530 j = abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString,", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"체결량\") code = code.strip() code_nm =", "QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def", "출력 : A039423 // 알파벳 A는 장내주식, J는 ELW종목, Q는 ETN종목 code =", "int(use_money) 
self.use_money = self.use_money / 4 output_deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "시장코드 입력 :return: ''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list = code_list.split(';')[:-1] return code_list", "# data = self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName) # [[‘’, ‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’,", "QString, int, QString)\", sTrCode, sRQName, 0, \"종목명\") code_name = code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString,", "in range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목번호\") #", "\", sCode,a, b) def send_order(self,order_type, sCode, order_quantity, order_price, hoga_type, order_num=\"\"): if order_type ==", "int(order_price) not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15, default: 0 not_chegual_quan = int(not_chegual_quan)", "QString, int, QString)\", sTrCode, sRQName, i, \"현재가\").strip() # 출력 : 000070 volume =", "abs(int(first_buy_price)) ######## 새로 들어온 주문이면 주문번호 할당 if sCode not in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode)", "self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt += 1 # 실시간 데이터", "0, \"종목코드\") code = code.strip() code_name = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "#종목별 할당할 주문용스크린 번호 self.screen_start_stop_real = \"1000\" #장 시작/종료 실시간 스크린번호 ######################################## #########", "# class Ui_class(): # def __init__(self): # self.app = QApplication(sys.argv) # self.kiwoom =", "int(stock_quantity.strip()) buy_price = int(buy_price.strip()) learn_rate = float(learn_rate.strip()) current_price = int(current_price.strip()) 
total_chegual_price = int(total_chegual_price.strip())", "1 self.screen_real_stock = str(temp_screen) if (cnt % 50) == 0: meme_screen += 1", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 : +(-)2515 f = abs(int(f)) g =", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price))", "Jango(): def __init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량", "g = abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 : 240124", "\"매입가\") # 매입가 : 000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "{} ######################## ########################################## self.data = None ####### 요청 스크린 번호 self.screen_my_info = \"2000\"", "= \"03\" order_success = self.dynamicCall( \"SendOrder(QString, QString, QString, int, QString, int, int, QString,", "self.use_money = int(use_money) self.use_money = self.use_money / 4 output_deposit = self.dynamicCall(\"GetCommData(QString, QString, int,", "codes def screen_number_setting(self): screen_overwrite = [] #계좌평가잔고내역에 있는 종목들 for code in self.account_stock_dict.keys():", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate", "#줄바꿈된 내용들이 한줄 씩 읽어와진다. 
# if line != \"\": # ls =", "= int(output_deposit) logging.debug(\"예수금 : %s\" % self.output_deposit) print(\"예수금 : %s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info)", "first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010 first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\",", "STOP_PROFIT_RATE = 0.03 # class Ui_class(): # def __init__(self): # self.app = QApplication(sys.argv)", "None ####### 요청 스크린 번호 self.screen_my_info = \"2000\" #계좌 관련한 스크린 번호 self.screen_calculation_stock", "code not in screen_overwrite: screen_overwrite.append(code) #미체결에 있는 종목들 for code in self.not_concluded_account.keys(): code", "ls[1] # stock_price = int(ls[2].split(\"\\n\")[0]) # stock_price = abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) #", "# stock_price = int(ls[2].split(\"\\n\")[0]) # stock_price = abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) # f.close()", "def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") # 로그인 요청 시그널 self.login_event_loop.exec_() # 이벤트루프 실행 def login_slot(self,", "딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\",", "self.jango_dict[sCode] #송수신 메세지 get def msg_slot(self, sScrNo, sRQName, sTrCode, msg): logging.debug(\"스크린: %s, 요청이름:", "\"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif sRQName == \"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode,", "code in self.not_concluded_account: pass else: self.not_concluded_account[code] = Jango(code) tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드': code})", "스크린번호 할당 cnt = 0 for code in screen_overwrite: temp_screen = int(self.screen_real_stock) meme_screen", "# tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate})", "있는 종목들 for code in self.account_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) #미체결에", "계좌 관련된 변수 self.account_stock_dict = {} self.not_concluded_account = {} self.deposit = 0 #예수금", "for code in screen_overwrite: temp_screen = int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock) if (cnt %", "QString, int, QString)\", sTrCode, sRQName, i, \"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "class start. 
\") print(\"Kiwoom() class start.\") ####### event loop를 실행하기 위한 변수모음 self.login_event_loop", "int, QString)\", sTrCode, sRQName, i, \"주문구분\") # -매도, +매수, -매도정정, +매수정정 not_quantity =", "전\") elif value == '3': logging.debug(\"장 시작\") elif value == \"2\": logging.debug(\"장 종료,", "초기 셋팅 함수들 바로 실행 self.get_ocx_instance() #OCX 방식을 파이썬에 사용할 수 있게 변환해", "출력 : +240124 매수일때, -2034 매도일 때 g = abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString,", "'', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self, companys): for code in companys: screen_num = self.not_concluded_account[code]['스크린번호']", "if code in self.not_concluded_account: pass else: self.not_concluded_account[code] = Jango(code) tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드':", "\"00\" elif hoga_type ==\"시장가\": hoga_dict = \"03\" order_success = self.dynamicCall( \"SendOrder(QString, QString, QString,", "str(meme_screen) if code in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code not in", "# 출력 : +(-)2530 j = abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['저가'])", ": %s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName == \"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString,", "for f in files: codes.append(f.replace(\".pt\",\"\")) for code in codes: self.portfolio_stock_dict[code] = Jango(code) return", "한줄 씩 읽어와진다. # if line != \"\": # ls = line.split(\"\\t\") #", "준다. 
trCode = \"opt10080\" sRQName = \"3분봉조회\" 수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\",", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"출금가능금액\") self.output_deposit = int(output_deposit) logging.debug(\"예수금 :", "= int(order_no.strip()) order_status = order_status.strip() order_quantity = int(order_quantity.strip()) order_price = int(order_price.strip()) order_gubun =", "# 계좌에 있는 종목의 총매입가 total_buy_price = int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun", "sTrCode, sRQName, i, \"수익률(%)\") # 수익률 : -000000001.94 current_price = self.dynamicCall(\"GetCommData(QString, QString, int,", "self.detail_account_info() #예수금 요청 시그널 포함 self.detail_account_mystock() #계좌평가잔고내역 요청 시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초", "h = abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력 : +(-)2530", "\"거래량\").strip() # 출력 : 000070 trading_value = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "요청 시그널 포함 self.get_account_info() #계좌번호 가져오기 self.detail_account_info() #예수금 요청 시그널 포함 self.detail_account_mystock() #계좌평가잔고내역", "% 50) == 0: temp_screen += 1 self.screen_real_stock = str(temp_screen) if (cnt %", "self.realType.REALTYPE[sRealType]['고가']) # 출력 : +(-)2530 i = abs(int(i)) j = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "# 익절 count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE))", "str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt += 1 # 실시간 데이터 얻어오기 def realdata_slot(self, sCode,", "할당할 주문용스크린 번호 self.screen_start_stop_real = \"1000\" #장 시작/종료 실시간 스크린번호 
######################################## ######### 초기", "= self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\")", "level=logging.INFO) class Kiwoom(QAxWidget): def __init__(self): super().__init__() self.realType = RealType() # self.slack = Slack()", "%s\" % ( code, code_nm, stock_quantity, buy_price, learn_rate, current_price)) if code in self.account_stock_dict:", "sRecordName, sPrevNext): # print(\"sRQName\", sRQName) if sRQName == \"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString, QString,", "경로에 파일이 있는지 체크한다. # f = open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") # \"r\"을 인자로", "sTrCode, sRQName, i, \"거래량\").strip() # 출력 : 000070 trading_value = self.dynamicCall(\"GetCommData(QString, QString, int,", "rows) # for item in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\")", "self.kiwoom.multi_test() # # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget): def __init__(self): super().__init__() self.realType =", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan)", "RealType() # self.slack = Slack() #슬랙 동작 #print(\"kiwoom() class start. 
\") print(\"Kiwoom() class", "dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\")", "tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\":", "code in self.portfolio_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) # 스크린번호 할당 cnt", "새로 들어온 주문이면 주문번호 할당 if sCode not in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp =", "code = self.not_concluded_account[code]['종목코드'] if code not in screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들 for", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") order_no = self.dynamicCall(\"GetCommData(QString, QString, int,", "‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’, ‘’]. 
[…]] logging.debug(\"3분봉조회 %s\" % cnt) ret_data=list() for", "if chegual_price == '': chegual_price = 0 else: chegual_price = int(chegual_price) chegual_quantity =", "QString, int, QString)\", sTrCode, sRQName, 0, \"출금가능금액\") self.output_deposit = int(output_deposit) logging.debug(\"예수금 : %s\"", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분) ret", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid) if value == '0': logging.debug(\"장 시작 전\") elif", "sTrCode, sRQName, i, \"거래대금\") # 출력 : 000070 date = self.dynamicCall(\"GetCommData(QString, QString, int,", "in screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들 for code in self.portfolio_stock_dict.keys(): if code not", "// 알파벳 A는 장내주식, J는 ELW종목, Q는 ETN종목 code = code.strip()[1:] code_nm =", "{} self.deposit = 0 #예수금 self.use_money = 0 #실제 투자에 사용할 금액 self.use_money_percent", "= [] #계좌평가잔고내역에 있는 종목들 for code in self.account_stock_dict.keys(): if code not in", "QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다.", "self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", screen_num, code, fids, \"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에", "sTrCode, sRQName, 0, \"종목코드\") code = code.strip() code_name = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "order_type == \"신규매수\": type_dict = 1 elif order_type ==\"신규매도\": type_dict = 2 elif", "0.03 # class Ui_class(): # def __init__(self): # self.app = QApplication(sys.argv) # self.kiwoom", "__init__(self): # self.app = QApplication(sys.argv) # self.kiwoom = Kiwoom() # ret = self.kiwoom.multi_test()", "self.account_stock_dict: # dictionary 에 해당 종목이 있나 확인 
pass else: self.account_stock_dict[code] = Jango(code)", "+(-)2530 i = abs(int(i)) j = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력 :", "QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString,", "% 50) == 0: meme_screen += 1 self.screen_meme_stock = str(meme_screen) if code in", ": 000070 data=[int(current_price),int(volume), int(start_price), int(high_price), int(low_price)] ret_data.append(data) self.data = ret_data self.calculator_event_loop.exit() def multi_rq3(self,", "if sRealType == \"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감)", "ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결 종목 : %s \" % self.not_concluded_account[code]) print(\"미체결 종목", "int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 : +(-)2520 e = abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString,", "정보 : \", sCode,a, b) def send_order(self,order_type, sCode, order_quantity, order_price, hoga_type, order_num=\"\"): if", "if self.jango[\"체결가\"] != 0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액 self.jango[\"보유금액\"]=self.jango[\"체결가\"]*self.jango[\"체결량\"] #내용 확인해 보자. 
기존", ": +(-)12.98 d = float(d) e = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력", "tick) self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분) ret = self.dynamicCall(\"CommRqData(QString, QString, int, QString, QString, QString)\",sRQName,trCode,", "# 이벤트루프 실행 def login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면 이벤트 루프를 종료한다.", "보유수량: %s - 매입가:%s - 수익률: %s - 현재가: %s\" % ( code,", "\"ACCNO\") # 계좌번호 반환 account_num = account_list.split(';')[1] self.account_num = account_num logging.debug(\"계좌번호 : %s\"", "self.get_account_info() #계좌번호 가져오기 self.detail_account_info() #예수금 요청 시그널 포함 self.detail_account_mystock() #계좌평가잔고내역 요청 시그널 포함", "\"r\", encoding=\"utf8\") # \"r\"을 인자로 던져주면 파일 내용을 읽어 오겠다는 뜻이다. # lines", "int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총수익률(%)\") self.total_profit_loss_rate =", "관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) # 실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결", "int, QString)\", sTrCode, sRQName, i, \"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "# 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"] while count", "\"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info)", "sCode not in self.not_concluded_account.keys(): 
self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\":", "#로그인 요청 시그널 포함 self.get_account_info() #계좌번호 가져오기 self.detail_account_info() #예수금 요청 시그널 포함 self.detail_account_mystock()", "\"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext): #", "번호 self.screen_my_info = \"2000\" #계좌 관련한 스크린 번호 self.screen_calculation_stock = \"4000\" #계산용 스크린", "order_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString,", "self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName) # [[‘’, ‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’.", ":param market_code: 시장코드 입력 :return: ''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list = code_list.split(';')[:-1]", "current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan =", "000070 high_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"고가\").strip() # 출력", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 : 240124 h = abs(int(h)) i =", "sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력 : +(-)2530 j = abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString, int)\",", "print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1 
self.account_stock_dict[sCode].jango[\"체결량\"]=count elif", "self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 : 240124 h = abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") # 출럭 : 한국기업평가 stock_quantity =", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\").strip() # 출력 : 000070 volume", "first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = abs(int(first_buy_price)) if sCode not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}})", "0: del self.jango_dict[sCode] #송수신 메세지 get def msg_slot(self, sScrNo, sRQName, sTrCode, msg): logging.debug(\"스크린:", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력 : +240124 매수일때, -2034 매도일 때 g", "!= 0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액 self.jango[\"보유금액\"]=self.jango[\"체결가\"]*self.jango[\"체결량\"] #내용 확인해 보자. 기존 주식과 합산", "QString, int, QString, int, int, QString, QString)\", [order_type, self.screen_meme_stock, self.account_num, type_dict, sCode, order_quantity,", "int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip()) if code in", "def login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면 이벤트 루프를 종료한다. self.login_event_loop.exit() def get_account_info(self):", "내용들이 한줄 씩 읽어와진다. 
# if line != \"\": # ls = line.split(\"\\t\")", "in range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목코드\") code_nm", "체결 정보 def chejan_slot(self, sGubun, nItemCnt, sFidList): if int(sGubun) == 0: #주문체결 account_num", "ETN종목 code = code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "self.dynamicCall(\"CommConnect()\") # 로그인 요청 시그널 self.login_event_loop.exec_() # 이벤트루프 실행 def login_slot(self, err_code): logging.debug(errors(err_code)[1])", "연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한 이벤트 def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") # 로그인 요청 시그널", "\"시가\").strip() # 출력 : 000070 high_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "i, \"종목명\") # 출럭 : 한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "수익률: %s - 현재가: %s\" % ( code, code_nm, stock_quantity, buy_price, learn_rate, current_price))", "class start.\") ####### event loop를 실행하기 위한 변수모음 self.login_event_loop = QEventLoop() #로그인 요청용", "변환해 주는 함수 self.event_slots() # 키움과 연결하기 위한 시그널 / 슬롯 모음 self.real_event_slot()", "QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\",", "sRQName, i, \"매매가능수량\") logging.debug(\"종목코드: %s - 종목명: %s - 보유수량: %s - 매입가:%s", "i, \"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문구분\") #", "code in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code]", "QString, int, QString)\", sTrCode, sRQName, i, \"고가\").strip() # 출력 : 000070 low_price =", "+(-)2515 f = abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력 :", "#5초 뒤에 미체결 종목들 가져오기 실행 ######################################### # QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간", "# 출력: -6000 first_buy_price = abs(int(first_buy_price)) ######## 새로 들어온 주문이면 주문번호 할당 if", "내용을 읽어 오겠다는 뜻이다. 
# lines = f.readlines() #파일에 있는 내용들이 모두 읽어와", "Kiwoom() # ret = self.kiwoom.multi_test() # # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget): def", "self.account_num, type_dict, sCode, order_quantity, order_price, hoga_dict, order_num] ) if order_success == 0: logging.debug(\"%s", "pass else: self.account_stock_dict[code] = Jango(code) code_nm = code_nm.strip() stock_quantity = int(stock_quantity.strip()) buy_price =", "연결 끊기 def get_code_list_by_market(self, market_code): ''' 종목코드 리스트 받기 #0:장내, 10:코스닥 :param market_code:", "\"2000\" #계좌 관련한 스크린 번호 self.screen_calculation_stock = \"4000\" #계산용 스크린 번호 self.screen_real_stock =", "장 종료\") for code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit() elif", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") # 출럭 : 한국기업평가", "== \"신규매수\": type_dict = 1 elif order_type ==\"신규매도\": type_dict = 2 elif order_type", "= QEventLoop() ######################################### ####### 계좌 관련된 변수 self.account_stock_dict = {} self.not_concluded_account = {}", "= abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력 : +(-)12.98 d", "%s \" % self.not_concluded_account[code]) print(\"미체결 종목 : %s \" % self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() #######################################", "self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g}) # 현재 가지고 있는 대상인지", "‘’]. 
[…]] logging.debug(\"3분봉조회 %s\" % cnt) ret_data=list() for i in range(cnt): data =", "print(\"%s 전달 성공\"%order_type) else: logging.debug(\"%s 전달 실패\"%order_type) return order_success # 실시간 체결 정보", "self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수, 확인, 체결 order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력 :", "시작/종료 실시간 스크린번호 ######################################## ######### 초기 셋팅 함수들 바로 실행 self.get_ocx_instance() #OCX 방식을", "+(-)2530 k = abs(int(k)) if sCode not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode]", "class Ui_class(): # def __init__(self): # self.app = QApplication(sys.argv) # self.kiwoom = Kiwoom()", "= abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['저가']) # 출력 : +(-)2530 k", "type_dict = 6 if hoga_type ==\"지정가\": hoga_dict = \"00\" elif hoga_type ==\"시장가\": hoga_dict", "data = [] code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목코드\")", ") if order_success == 0: logging.debug(\"%s 전달 성공\"%order_type) print(\"%s 전달 성공\"%order_type) else: logging.debug(\"%s", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString, QString,", "order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price})", "= Kiwoom() # ret = self.kiwoom.multi_test() # # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget):", "self.detail_account_info_event_loop = QEventLoop() # 예수금 요청용 이벤트루프 self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop()", 
"float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 : %s - %s - %s\" % (total_buy_money, total_profit_loss_money, total_profit_loss_rate))", "'' if chegual_quantity == '': chegual_quantity = 0 else: chegual_quantity = int(chegual_quantity) current_price", "현재 가지고 있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결 정보 : \", self.not_concluded_account[sCode][\"종목명\"],a,", "k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and", ": 000070 low_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"저가\").strip() #", "읽어와 진다. # for line in lines: #줄바꿈된 내용들이 한줄 씩 읽어와진다. #", "tmp.jango.update({\"현재가\": current_price}) tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun) == 1: #잔고 account_num = self.dynamicCall(\"GetChejanData(int)\",", "QString)\", sTrCode, sRQName) # print(sTrCode) # data = self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName) #", "QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString,", "current_price = abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010 first_sell_price = abs(int(first_sell_price))", "종목 정보 가져오기 self.portfolio_stock_dict = {} self.jango_dict = {} ######################## ########################################## self.data =", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"고가\").strip() # 출력 : 000070", "ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as e: print(e) print(\"EXception", "int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip()) tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\":", "tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결 종목 : %s \" % self.not_concluded_account[code]) print(\"미체결 종목 :", "# ui = Ui_class() class Jango(): def __init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] =", "self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = abs(int(first_buy_price)) if sCode not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\":", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"예수금\") self.deposit = int(deposit) use_money =", "import * from PyQt5.QtCore import * from config.errorCode import * from PyQt5.QtTest import", "스크린 번호 self.screen_meme_stock = \"6000\" #종목별 할당할 주문용스크린 번호 self.screen_start_stop_real = \"1000\" #장", "stock_code = ls[0] 
# stock_name = ls[1] # stock_price = int(ls[2].split(\"\\n\")[0]) # stock_price", ": +(-)2520 e = abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력", "\"저가\").strip() # 출력 : 000070 data=[int(current_price),int(volume), int(start_price), int(high_price), int(low_price)] ret_data.append(data) self.data = ret_data", "self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000 order_price = int(order_price) not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력:", "tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun) == 1: #잔고 account_num", "장내주식, J는 ELW종목, Q는 ETN종목 code = code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString, QString, int,", "self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. # self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self): #손익률 if self.jango[\"체결가\"] !=", "3 elif order_type ==\"매도취소\": type_dict = 4 elif order_type ==\"매수정정\": type_dict = 5", "QString, int, QString)\", sTrCode, sRQName, i, \"현재가\") # 현재가 : 000000003450 total_chegual_price =", "오겠다는 뜻이다. # lines = f.readlines() #파일에 있는 내용들이 모두 읽어와 진다. 
#", "\"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info)", "update(self): #손익률 if self.jango[\"체결가\"] != 0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액 self.jango[\"보유금액\"]=self.jango[\"체결가\"]*self.jango[\"체결량\"] #내용 확인해", "self.realType.REALTYPE[sRealType]['저가']) # 출력 : +(-)2530 k = abs(int(k)) if sCode not in self.not_concluded_account:", ": 240124 h = abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력", "order_gubun = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문구분\") # -매도, +매수,", "= 0 #출력가능 금액 self.total_profit_loss_money = 0 #총평가손익금액 self.total_profit_loss_rate = 0.0 #총수익률(%) ########################################", "내용들이 모두 읽어와 진다. # for line in lines: #줄바꿈된 내용들이 한줄 씩", "#장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self,", "companys): for code in companys: screen_num = self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString,", "order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) #", "\"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext,", "= {} self.jango_dict = {} ######################## ########################################## self.data = None ####### 요청 스크린", "sTrCode, sRQName, 0, \"종목명\") 
code_name = code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "#보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. # self.jango[\"평균단가\"]=0", "total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money)", "self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop() ######################################### ####### 계좌 관련된 변수 self.account_stock_dict =", "# 접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문수량\") order_price", "= \"2000\" #계좌 관련한 스크린 번호 self.screen_calculation_stock = \"4000\" #계산용 스크린 번호 self.screen_real_stock", "\"신규매수\": type_dict = 1 elif order_type ==\"신규매도\": type_dict = 2 elif order_type ==\"매수취소\":", "1: #잔고 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\",", "if code in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code not in self.portfolio_stock_dict.keys():", "# 예수금 요청용 이벤트루프 self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop() ######################################### ####### 계좌", "code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\").strip() # 출력", 
"self.not_concluded_account[sCode][\"종목명\"],a, b) pass except Exception as e: print(\"실시간 주식체결 정보 : \", sCode,a,", "not in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name})", "= 0.03 STOP_PROFIT_RATE = 0.03 # class Ui_class(): # def __init__(self): # self.app", "def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\",", "i, \"저가\").strip() # 출력 : 000070 data=[int(current_price),int(volume), int(start_price), int(high_price), int(low_price)] ret_data.append(data) self.data =", "ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\") self.calculator_event_loop.exec_() return self.data def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\",", "\"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.account_stock_dict =", "tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price}) #", "# if line != \"\": # ls = line.split(\"\\t\") # stock_code = ls[0]", "키움과 연결하기 위한 시그널 / 슬롯 모음 self.real_event_slot() # 실시간 이벤트 시그널 /", "출력 : 3 order_quan = int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000", "int, QString)\", sTrCode, sRQName, 0, \"종목명\") code_name = code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString, QString,", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_()", "def send_order(self,order_type, sCode, order_quantity, order_price, hoga_type, order_num=\"\"): if order_type == \"신규매수\": type_dict =", "if chegual_quantity == '': chegual_quantity = 0 else: chegual_quantity = int(chegual_quantity) current_price =", "self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip() origin_order_number", "= int(buy_price.strip()) learn_rate = float(learn_rate.strip()) current_price = int(current_price.strip()) total_chegual_price = int(total_chegual_price.strip()) possible_quantity =", "#총평가손익금액 self.total_profit_loss_rate = 0.0 #총수익률(%) ######################################## ######## 종목 정보 가져오기 self.portfolio_stock_dict = {}", "msg): logging.debug(\"스크린: %s, 요청이름: %s, tr코드: %s --- %s\" %(sScrNo, sRQName, sTrCode, msg))", "self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110 default : '' if chegual_price == '': chegual_price =", "QString, int, 
QString)\", sTrCode, sRQName, i, \"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "order_no = int(order_no.strip()) order_status = order_status.strip() order_quantity = int(order_quantity.strip()) order_price = int(order_price.strip()) order_gubun", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도, +매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\",", "class Jango(): def __init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0", "\"SendOrder(QString, QString, QString, int, QString, int, int, QString, QString)\", [order_type, self.screen_meme_stock, self.account_num, type_dict,", "int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력 : +240124 매수일때, -2034 매도일 때 g =", "msg)) # ui = Ui_class() class Jango(): def __init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"]", "elif code not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt", "준다. 
account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호 반환 account_num = account_list.split(';')[1] self.account_num =", "sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호 연결 끊기 def get_code_list_by_market(self, market_code): ''' 종목코드 리스트", "= [] code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목코드\") code", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS b = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['현재가'])", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['저가']) # 출력 : +(-)2530 k = abs(int(k)) if sCode", "sRQName, i, \"일자\") # 출력 : 000070 start_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "in files: codes.append(f.replace(\".pt\",\"\")) for code in codes: self.portfolio_stock_dict[code] = Jango(code) return codes def", "= 1 elif order_type ==\"신규매도\": type_dict = 2 elif order_type ==\"매수취소\": type_dict =", "stock_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"보유수량\") # 보유수량 :", "if code not in screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들 for code in self.portfolio_stock_dict.keys():", "매입가 : 000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"수익률(%)\")", "code_nm, stock_quantity, buy_price, learn_rate, current_price)) if code in self.account_stock_dict: # dictionary 에 해당", "종목들 가져오기 실행 ######################################### # QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간 수신 관련 함수", "self.realType.REALTYPE[sRealType]['등락율']) # 출력 : +(-)12.98 d = float(d) e = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "== \"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", 
sTrCode, sRQName) for i in range(rows): code", "번호 self.screen_meme_stock = \"6000\" #종목별 할당할 주문용스크린 번호 self.screen_start_stop_real = \"1000\" #장 시작/종료", "sys.exit() elif sRealType == \"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력", "가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate =", "50) == 0: meme_screen += 1 self.screen_meme_stock = str(meme_screen) if code in self.portfolio_stock_dict.keys():", "접수, 확인, 체결 order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력 : 3 order_quan =", "b = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['현재가']) # 출력 : +(-)2520 b = abs(int(b))", "\"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매매가능수량\") logging.debug(\"종목코드: %s", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목코드\") code = code.strip() code_name", "f = open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") # \"r\"을 인자로 던져주면 파일 내용을 읽어 오겠다는", "logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면 이벤트 루프를 종료한다. 
self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를", "elif order_type ==\"매도정정\": type_dict = 6 if hoga_type ==\"지정가\": hoga_dict = \"00\" elif", "= self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격':", "요청 시그널 포함 self.detail_account_mystock() #계좌평가잔고내역 요청 시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에 미체결", "and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k)", "%(sScrNo, sRQName, sTrCode, msg)) # ui = Ui_class() class Jango(): def __init__(self, code):", "int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 : 240124 h = abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString,", "logging.debug(\"sPreNext : %s\" % sPrevNext) print(\"\\n계좌에 가지고 있는 종목은 %s \" % rows)", "출럭 : 한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"보유수량\")", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총매입금액\") self.total_buy_money = int(total_buy_money) total_profit_loss_money", "\"고가\").strip() # 출력 : 000070 low_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self): #손익률 if self.jango[\"체결가\"] != 0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액", "e = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 : +(-)2520 e = abs(int(e))", "type_dict = 5 elif order_type ==\"매도정정\": type_dict = 6 if hoga_type ==\"지정가\": hoga_dict", "/ 슬롯 모음 self.real_event_slot() # 
실시간 이벤트 시그널 / 슬롯 연결 self.signal_login_commConnect() #로그인", "code_list.split(';')[:-1] return code_list def read_code(self): # if os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에 파일이 있는지", "\"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문구분\") # -매도,", "tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\":", "self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit() elif sRealType == \"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString,", "ls[0] # stock_name = ls[1] # stock_price = int(ls[2].split(\"\\n\")[0]) # stock_price = abs(stock_price)", "g = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력 : +240124 매수일때, -2034 매도일", "self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액 self.jango[\"보유금액\"]=self.jango[\"체결가\"]*self.jango[\"체결량\"] #내용 확인해 보자. 
기존 주식과 합산 계산 되는지", "code in self.account_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) #미체결에 있는 종목들 for", "in screen_overwrite: screen_overwrite.append(code) # 스크린번호 할당 cnt = 0 for code in screen_overwrite:", "# 출력 : 000070 date = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "#계좌 관련한 스크린 번호 self.screen_calculation_stock = \"4000\" #계산용 스크린 번호 self.screen_real_stock = \"5000\"", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString, QString,", "self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0:", "setRealReg(self, companys): for code in companys: screen_num = self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString,", "QString)\", sTrCode, sRQName, i, \"일자\") # 출력 : 000070 start_price = self.dynamicCall(\"GetCommData(QString, QString,", "self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip() current_price", "= self.dynamicCall(\"CommRqData(QString, QString, int, QString, QString, QString)\",sRQName,trCode, \"0\", self.screen_meme_stock) # ret = self.dynamicCall(\"GetCommDataEx(QString,", "출력: -6000 first_buy_price = abs(int(first_buy_price)) ######## 새로 들어온 주문이면 주문번호 할당 if sCode", ": \", self.not_concluded_account[sCode][\"종목명\"],a, b) pass except Exception as e: print(\"실시간 주식체결 정보 :", "출력 HHMMSS b = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['현재가']) # 출력 : +(-)2520 b", "from 
config.errorCode import * from PyQt5.QtTest import * from config.kiwoomType import * #", "금액 self.total_profit_loss_money = 0 #총평가손익금액 self.total_profit_loss_rate = 0.0 #총수익률(%) ######################################## ######## 종목 정보", "\"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0,", "if code not in screen_overwrite: screen_overwrite.append(code) #미체결에 있는 종목들 for code in self.not_concluded_account.keys():", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 출럭: 0115061 마지막 주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) #", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목번호\") # 출력 : A039423 //", "= self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\") self.calculator_event_loop.exec_() return self.data def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo)", "[…]] logging.debug(\"3분봉조회 %s\" % cnt) ret_data=list() for i in range(cnt): data = []", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010 first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가'])", "def __init__(self): super().__init__() self.realType = RealType() # self.slack = Slack() #슬랙 동작 #print(\"kiwoom()", "order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity})", "0 else: chegual_price = int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 
출력: 5 default", "sRQName, i, \"종목명\") order_no = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문번호\")", "range(cnt): data = [] code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0,", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110", "int(chegual_quantity) current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000 current_price = abs(int(current_price)) first_sell_price =", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString, QString, int,", "self.login_event_loop = QEventLoop() #로그인 요청용 이벤트루프 self.detail_account_info_event_loop = QEventLoop() # 예수금 요청용 이벤트루프", "#포트폴리로에 담겨있는 종목들 for code in self.portfolio_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code)", "= int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip()) if code in self.not_concluded_account: pass else: self.not_concluded_account[code] =", "print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b:", "= QEventLoop() #로그인 요청용 이벤트루프 self.detail_account_info_event_loop = QEventLoop() # 예수금 요청용 이벤트루프 self.calculator_event_loop", "self.screen_calculation_stock = \"4000\" #계산용 스크린 번호 self.screen_real_stock = \"5000\" #종목별 할당할 스크린 번호", "= int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총수익률(%)\") 
self.total_profit_loss_rate", "= abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 : +(-)2515 f", "sTrCode, sRQName, i, \"체결량\") code = code.strip() code_nm = code_nm.strip() order_no = int(order_no.strip())", "self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0", "= self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName) # [[‘’, ‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’,", "ret = self.dynamicCall(\"CommRqData(QString, QString, int, QString, QString, QString)\",sRQName,trCode, \"0\", self.screen_meme_stock) # ret =", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문구분\") # -매도, +매수, -매도정정,", "chegual_price = int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 출력: 5 default : ''", "from PyQt5.QtTest import * from config.kiwoomType import * # from config.slack import *", "api 모듈 불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션", "not_quantity = int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip()) if code in self.not_concluded_account: pass else: self.not_concluded_account[code]", "STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE = 0.03 # class Ui_class(): # def __init__(self): #", "읽어와진다. 
# if line != \"\": # ls = line.split(\"\\t\") # stock_code =", "current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update() logging.debug(\"sPreNext : %s\" %", "print(self.jango_dict) if stock_quan == 0: del self.jango_dict[sCode] #송수신 메세지 get def msg_slot(self, sScrNo,", "total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i in range(rows): code =", "b) pass except Exception as e: print(\"실시간 주식체결 정보 : \", sCode,a, b)", "buy_price = int(buy_price.strip()) learn_rate = float(learn_rate.strip()) current_price = int(current_price.strip()) total_chegual_price = int(total_chegual_price.strip()) possible_quantity", "# 실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한 이벤트 def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") #", "QString, int, QString)\", sTrCode, sRQName, i, \"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "출력 : +(-)2515 f = abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) #", "stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip() origin_order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력", "abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력 : +(-)12.98 d =", "self.slack = Slack() #슬랙 동작 #print(\"kiwoom() class start. \") print(\"Kiwoom() class start.\") #######", "self.account_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) #미체결에 있는 종목들 for code in", "#print(\"kiwoom() class start. 
\") print(\"Kiwoom() class start.\") ####### event loop를 실행하기 위한 변수모음", "출력: 15, default: 0 not_chegual_quan = int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력:", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는 종목의 총매입가 total_buy_price = int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\",", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\",", "tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량' :", "이벤트루프 self.detail_account_info_event_loop = QEventLoop() # 예수금 요청용 이벤트루프 self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop =", "1000}) tmp.update() logging.debug(\"미체결 종목 : %s \" % self.not_concluded_account[code]) print(\"미체결 종목 : %s", "6 if hoga_type ==\"지정가\": hoga_dict = \"00\" elif hoga_type ==\"시장가\": hoga_dict = \"03\"", "total_profit_loss_money, total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i in range(rows): code", "QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\",", "파일이 있는지 체크한다. # f = open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") # \"r\"을 인자로 던져주면", "출력 : +(-)12.98 d = float(d) e = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) #", ": %s\" % account_num) def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\",", "tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g}) # 현재 가지고 있는 대상인지 파악", ": +(-)2530 k = abs(int(k)) if sCode not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c =", "order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip()) if code in self.not_concluded_account:", "# 로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self):", "확인 pass else: self.account_stock_dict[code] = Jango(code) code_nm = code_nm.strip() stock_quantity = int(stock_quantity.strip()) buy_price", "0: #주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\",", "QString)\", sTrCode, sRQName, i, \"고가\").strip() # 출력 : 000070 low_price = self.dynamicCall(\"GetCommData(QString, QString,", "sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext): # print(\"sRQName\", sRQName)", "\"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\",", "self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된 api 모듈 불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인 관련", "self.deposit = 0 #예수금 self.use_money = 0 #실제 투자에 사용할 금액 self.use_money_percent =", "#계산용 스크린 번호 self.screen_real_stock = \"5000\" #종목별 할당할 스크린 번호 self.screen_meme_stock = \"6000\"", "번호 self.screen_start_stop_real = \"1000\" #장 
시작/종료 실시간 스크린번호 ######################################## ######### 초기 셋팅 함수들", "self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count", "#3.6초마다 딜레이를 준다. account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호 반환 account_num = account_list.split(';')[1]", "config.slack import * import logging from PyQt5.QtWidgets import * STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE", "주문번호 할당 if sCode not in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode})", "# 출력 : +(-)2530 i = abs(int(i)) j = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['시가'])", "‘고가’, ‘저가’. ‘’], [‘’, ‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’, ‘’]. […]]", "% self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() ####################################### elif sRQName == \"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode,", "‘’], [‘’, ‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’, ‘’]. […]] logging.debug(\"3분봉조회 %s\"", "\"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분) ret = self.dynamicCall(\"CommRqData(QString, QString, int, QString, QString,", "QString)\", sTrCode, sRQName) for i in range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "self.realType = RealType() # self.slack = Slack() #슬랙 동작 #print(\"kiwoom() class start. 
\")", "# 출력 HHMMSS b = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['현재가']) # 출력 : +(-)2520", "self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self, sScrNo, sRQName,", "int, QString)\", sTrCode, sRQName, 0, \"출금가능금액\") self.output_deposit = int(output_deposit) logging.debug(\"예수금 : %s\" %", "* from config.kiwoomType import * # from config.slack import * import logging from", "stock_name = ls[1] # stock_price = int(ls[2].split(\"\\n\")[0]) # stock_price = abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name,", "[] #계좌평가잔고내역에 있는 종목들 for code in self.account_stock_dict.keys(): if code not in screen_overwrite:", "QTest.qWait(3600) #3.6초마다 딜레이를 준다. trCode = \"opt10080\" sRQName = \"3분봉조회\" 수정주가구분 = 1", "\"예수금\") self.deposit = int(deposit) use_money = float(self.deposit) * self.use_money_percent self.use_money = int(use_money) self.use_money", "int, QString)\", sTrCode, sRQName, i, \"종목명\") # 출럭 : 한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString,", "range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목번호\") # 출력", "- %s - %s\" % (total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode,", "h = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 : 240124 h = abs(int(h))", "#예수금에서 실제 사용할 비율 self.output_deposit = 0 #출력가능 금액 self.total_profit_loss_money = 0 #총평가손익금액", "= self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호 반환 account_num = account_list.split(';')[1] self.account_num = account_num logging.debug(\"계좌번호", "0 #총평가손익금액 self.total_profit_loss_rate = 0.0 #총수익률(%) ######################################## ######## 종목 정보 가져오기 
self.portfolio_stock_dict =", "QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\",", "stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price})", "QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString,", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도, +매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간'])", "code, fids, \"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된 api 모듈 불러오기 def", "sRQName, i, \"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"체결량\")", ": +(-)2530 i = abs(int(i)) j = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력", "주는 함수 self.event_slots() # 키움과 연결하기 위한 시그널 / 슬롯 모음 self.real_event_slot() #", "int, QString)\", sTrCode, sRQName, i, \"매입가\") # 매입가 : 000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString,", "\"일자\") # 출력 : 000070 start_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "sTrCode, sRQName, i, \"주문구분\") # -매도, +매수, -매도정정, +매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString, QString,", 
"screen_overwrite: temp_screen = int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock) if (cnt % 50) == 0:", "파악 if sCode in self.account_stock_dict.keys(): try: # 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and", "sTrCode, sRQName, i, \"현재가\") # 현재가 : 000000003450 total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString, int,", "QString)\",sRQName,trCode, \"0\", self.screen_meme_stock) # ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\") self.calculator_event_loop.exec_() return self.data", "self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600)", ": %s \" % self.not_concluded_account[code]) print(\"미체결 종목 : %s \" % self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit()", "보유수량 : 000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입가\")", "sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력 : +(-)12.98 d = float(d) e = self.dynamicCall(\"GetCommRealData(QString, int)\",", "1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분)", "= int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가'])", "출력 : 000070 trading_value = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래대금\")", "code}) 
tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun})", "계좌번호 반환 account_num = account_list.split(';')[1] self.account_num = account_num logging.debug(\"계좌번호 : %s\" % account_num)", "요청용 이벤트루프 self.detail_account_info_event_loop = QEventLoop() # 예수금 요청용 이벤트루프 self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop", "print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'],", "in self.not_concluded_account.keys(): code = self.not_concluded_account[code]['종목코드'] if code not in screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는", "if hoga_type ==\"지정가\": hoga_dict = \"00\" elif hoga_type ==\"시장가\": hoga_dict = \"03\" order_success", "\" % self.not_concluded_account[code]) print(\"미체결 종목 : %s \" % self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() ####################################### elif", "chegual_price == '': chegual_price = 0 else: chegual_price = int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\",", "요청 시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에 미체결 종목들 가져오기 실행 ######################################### #", "* import logging from PyQt5.QtWidgets import * STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE = 0.03", "\"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i in range(rows): code =", "order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력 
: 3 order_quan = int(order_quan) order_price =", "== \"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총매입금액\") self.total_buy_money", "account_list.split(';')[1] self.account_num = account_num logging.debug(\"계좌번호 : %s\" % account_num) def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600)", "sRQName, i, \"체결량\") code = code.strip() code_nm = code_nm.strip() order_no = int(order_no.strip()) order_status", "Jango(code) return codes def screen_number_setting(self): screen_overwrite = [] #계좌평가잔고내역에 있는 종목들 for code", "i, \"매매가능수량\") logging.debug(\"종목코드: %s - 종목명: %s - 보유수량: %s - 매입가:%s -", "Q는 ETN종목 code = code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot)", "이벤트루프 실행 def login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면 이벤트 루프를 종료한다. 
self.login_event_loop.exit()", "%s \" % rows) # for item in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if sPrevNext", "<gh_stars>0 import os import pickle import sys from PyQt5.QAxContainer import * from PyQt5.QtCore", "for code in codes: self.portfolio_stock_dict[code] = Jango(code) return codes def screen_number_setting(self): screen_overwrite =", "요청 스크린 번호 self.screen_my_info = \"2000\" #계좌 관련한 스크린 번호 self.screen_calculation_stock = \"4000\"", "관련한 스크린 번호 self.screen_calculation_stock = \"4000\" #계산용 스크린 번호 self.screen_real_stock = \"5000\" #종목별", "self.calculator_event_loop.exec_() return self.data def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호 연결 끊기 def", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 : +(-)2520 e = abs(int(e)) f", "elif order_type ==\"신규매도\": type_dict = 2 elif order_type ==\"매수취소\": type_dict = 3 elif", "import * STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE = 0.03 # class Ui_class(): # def", "self.screen_start_stop_real = \"1000\" #장 시작/종료 실시간 스크린번호 ######################################## ######### 초기 셋팅 함수들 바로", "i, \"고가\").strip() # 출력 : 000070 low_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as e: print(e) print(\"EXception 현재 가지고", ": 000000003450 total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입금액\") possible_quantity", "fids, \"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된 api 모듈 불러오기 def event_slots(self):", "meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\", 
self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\",", "self.screen_my_info) self.detail_account_info_event_loop.exec_() def detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString,", "self.data def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호 연결 끊기 def get_code_list_by_market(self, market_code):", "self.account_stock_dict = {} self.not_concluded_account = {} self.deposit = 0 #예수금 self.use_money = 0", "000070 trading_value = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래대금\") # 출력", "포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에 미체결 종목들 가져오기 실행 ######################################### # QTest.qWait(10000) self.read_code()", "15, default: 0 not_chegual_quan = int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도,", "self.portfolio_stock_dict = {} self.jango_dict = {} ######################## ########################################## self.data = None ####### 요청", "가져오기 실행 ######################################### # QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간 수신 관련 함수 #장시작", "\"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감) value = self.dynamicCall(\"GetCommRealData(QString,", "self.realType.REALTYPE[sRealType]['시가']) # 출력 : +(-)2530 j = abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\") # 현재가 : 000000003450 total_chegual_price", "# 출력 : +(-)12.98 d = float(d) e = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, 
self.realType.REALTYPE[sRealType]['(최우선)매도호가'])", "sCode,a, b) def send_order(self,order_type, sCode, order_quantity, order_price, hoga_type, order_num=\"\"): if order_type == \"신규매수\":", "있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결 정보 : \", self.not_concluded_account[sCode][\"종목명\"],a, b) pass", "# 출력: 21000 order_price = int(order_price) not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15,", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15, default: 0 not_chegual_quan = int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\",", "# -매도, +매수, -매도정정, +매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "레지스트리에 저장된 api 모듈 불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot)", "\" % rows) # for item in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if sPrevNext ==", "for item in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit()", "PyQt5.QAxContainer import * from PyQt5.QtCore import * from config.errorCode import * from PyQt5.QtTest", "stock_quan == 0: del self.jango_dict[sCode] #송수신 메세지 get def msg_slot(self, sScrNo, sRQName, sTrCode,", "# self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) # f.close() files = os.listdir(\"./models/\") codes=list() for f in files:", "= line.split(\"\\t\") # stock_code = ls[0] # stock_name = ls[1] # stock_price =", "-2034 매도일 때 g = abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) #", "= 4 elif order_type ==\"매수정정\": type_dict = 5 elif order_type ==\"매도정정\": type_dict =", 
"self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget): def __init__(self): super().__init__() self.realType = RealType() # self.slack", "not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15, default: 0 not_chegual_quan = int(not_chegual_quan) order_gubun", "Kiwoom(QAxWidget): def __init__(self): super().__init__() self.realType = RealType() # self.slack = Slack() #슬랙 동작", "비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결 정보 : \", self.not_concluded_account[sCode][\"종목명\"],a, b) pass except Exception", "관련된 변수 self.account_stock_dict = {} self.not_concluded_account = {} self.deposit = 0 #예수금 self.use_money", "sRQName, i, \"거래대금\") # 출력 : 000070 date = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "2 elif order_type ==\"매수취소\": type_dict = 3 elif order_type ==\"매도취소\": type_dict = 4", "value == '3': logging.debug(\"장 시작\") elif value == \"2\": logging.debug(\"장 종료, 동시호가로 넘어감\")", "= code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") #", "self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price", "\"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 출럭: 0115061 마지막 주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\",", "self.use_money_percent = 0.5 #예수금에서 실제 사용할 비율 self.output_deposit = 0 #출력가능 금액 self.total_profit_loss_money", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문구분\") # -매도, +매수, -매도정정, +매수정정", "print(\"미체결 종목 : %s \" % self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() 
####################################### elif sRQName == \"3분봉조회\":", "# 매입가 : 000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "= 5 elif order_type ==\"매도정정\": type_dict = 6 if hoga_type ==\"지정가\": hoga_dict =", "order_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString,", "변수 self.account_stock_dict = {} self.not_concluded_account = {} self.deposit = 0 #예수금 self.use_money =", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString, QString,", "int(order_no.strip()) order_status = order_status.strip() order_quantity = int(order_quantity.strip()) order_price = int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-')", "sRQName == \"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) # print(sTrCode) # data", "order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도, +매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str =", "= RealType() # self.slack = Slack() #슬랙 동작 #print(\"kiwoom() class start. 
\") print(\"Kiwoom()", "code not in screen_overwrite: screen_overwrite.append(code) # 스크린번호 할당 cnt = 0 for code", "+240124 매수일때, -2034 매도일 때 g = abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "logging.debug(\"예수금 : %s\" % self.output_deposit) print(\"예수금 : %s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif", "\"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\",", "QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"예수금상세현황요청\", \"opw00001\",", "Ui_class(): # def __init__(self): # self.app = QApplication(sys.argv) # self.kiwoom = Kiwoom() #", "def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\",", "QString)\", sTrCode, sRQName, 0, \"총매입금액\") self.total_buy_money = int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString, int,", "# 트랜잭션 요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) # 실시간 이벤트 연결", "import * from config.kiwoomType import * # from config.slack import * import logging", "ok_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"체결량\") code = code.strip()", "stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan", "#실시간 수신 관련 함수 #장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", self.screen_start_stop_real, '',", "= open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") # \"r\"을 인자로 던져주면 파일 내용을 읽어 오겠다는 뜻이다.", "screen_overwrite: screen_overwrite.append(code) #미체결에 있는 종목들 for code in self.not_concluded_account.keys(): code = self.not_concluded_account[code]['종목코드'] if", "self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price)) total_buy_price =", "QString)\", sTrCode, sRQName, i, \"주문구분\") # -매도, +매수, -매도정정, +매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString,", "출력: '151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110 default : '' if", "b = abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력 : +(-)2520", "self.jango[\"주문상태\"]=\"\" 
self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. # self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self):", "000000003450 total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입금액\") possible_quantity =", "tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango)", "#장 시작/종료 실시간 스크린번호 ######################################## ######### 초기 셋팅 함수들 바로 실행 self.get_ocx_instance() #OCX", "self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\":", "self.screen_meme_stock) # ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\") self.calculator_event_loop.exec_() return self.data def stop_screen_cancel(self,", "\"1000\" #장 시작/종료 실시간 스크린번호 ######################################## ######### 초기 셋팅 함수들 바로 실행 self.get_ocx_instance()", "sCode not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g}) #", "%s\" %(sScrNo, sRQName, sTrCode, msg)) # ui = Ui_class() class Jango(): def __init__(self,", "int, QString, QString)\", [order_type, self.screen_meme_stock, self.account_num, type_dict, sCode, order_quantity, order_price, hoga_dict, order_num] )", "\"4\": logging.debug(\"3시30분 장 
종료\") for code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000)", "종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self, companys):", "self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000 first_buy_price = abs(int(first_buy_price)) ######## 새로 들어온 주문이면 주문번호 할당", "# print(self.account_stock_dict[item].jango) if sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif sRQName == \"실시간미체결요청\":", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력 : +(-)2520 c = abs(int(c)) d =", "QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") order_no = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "sRQName) # [[‘’, ‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’. 
‘’], [‘’, ‘현재가’,", "* # from config.slack import * import logging from PyQt5.QtWidgets import * STOP_LOSS_RATE", "들어온 주문이면 주문번호 할당 if sCode not in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode]", "total_chegual_price = int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip()) tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\":", "# f.close() files = os.listdir(\"./models/\") codes=list() for f in files: codes.append(f.replace(\".pt\",\"\")) for code", "실제 사용할 비율 self.output_deposit = 0 #출력가능 금액 self.total_profit_loss_money = 0 #총평가손익금액 self.total_profit_loss_rate", "# 출력 : defaluse : \"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 출럭: 0115061", "default : '' if chegual_price == '': chegual_price = 0 else: chegual_price =", "요청용 이벤트루프 self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop() ######################################### ####### 계좌 관련된 변수", "가지고 있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결 정보 : \", self.not_concluded_account[sCode][\"종목명\"],a, b)", "cnt = 0 for code in screen_overwrite: temp_screen = int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock)", "code in screen_overwrite: temp_screen = int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock) if (cnt % 50)", "= self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\":", "= 3 elif order_type ==\"매도취소\": type_dict = 4 elif order_type ==\"매수정정\": type_dict =", "chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update() 
print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun) == 1: #잔고", "예수금 요청용 이벤트루프 self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop() ######################################### ####### 계좌 관련된", "출력: -6000 current_price = abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010 first_sell_price", "#계좌평가잔고내역에 있는 종목들 for code in self.account_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code)", "elif value == '3': logging.debug(\"장 시작\") elif value == \"2\": logging.debug(\"장 종료, 동시호가로", "0, \"예수금\") self.deposit = int(deposit) use_money = float(self.deposit) * self.use_money_percent self.use_money = int(use_money)", "self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price})", "get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된 api 모듈 불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인", "해당 경로에 파일이 있는지 체크한다. 
# f = open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") # \"r\"을", "\"보유수량\") # 보유수량 : 000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "sTrCode, sRQName, i, \"매입가\") # 매입가 : 000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString, QString, int,", "수신 관련 함수 #장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'],", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan) like_quan", "def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호 연결 끊기 def get_code_list_by_market(self, market_code): '''", "로그인 요청 시그널 self.login_event_loop.exec_() # 이벤트루프 실행 def login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인 처리가", "order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun) ==", "account_num = account_list.split(';')[1] self.account_num = account_num logging.debug(\"계좌번호 : %s\" % account_num) def detail_account_info(self,", "for code in companys: screen_num = self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString,", "nItemCnt, sFidList): if int(sGubun) == 0: #주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode =", "self.not_concluded_account = {} self.deposit = 0 #예수금 self.use_money = 0 #실제 투자에 사용할", "sRealData): if sRealType == \"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분), 3:장시작, 
4,8:장종료(30분),", "chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110 default : '' if chegual_price ==", "print(sTrCode) # data = self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName) # [[‘’, ‘현재가’, ‘거래량’, ‘거래대금’,", "int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력 : +(-)2520 c = abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString,", "5 elif order_type ==\"매도정정\": type_dict = 6 if hoga_type ==\"지정가\": hoga_dict = \"00\"", "실시간 체결 정보 def chejan_slot(self, sGubun, nItemCnt, sFidList): if int(sGubun) == 0: #주문체결", "#0:장내, 10:코스닥 :param market_code: 시장코드 입력 :return: ''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list", "sTrCode, sRQName, 0, \"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 : %s - %s", "elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑프로핏", "주문체결 관련한 이벤트 def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") # 로그인 요청 시그널 self.login_event_loop.exec_() # 이벤트루프", "= int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도, +매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-')", "if sCode not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\":", "int, QString)\", sTrCode, sRQName, i, \"체결량\") code = code.strip() code_nm = code_nm.strip() order_no", "in screen_overwrite: temp_screen = int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock) if (cnt % 50) ==", "# 키움과 연결하기 위한 시그널 / 슬롯 모음 
self.real_event_slot() # 실시간 이벤트 시그널", "\"주문구분\") # -매도, +매수, -매도정정, +매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\").strip() # 출력 : 000070", "비율 self.output_deposit = 0 #출력가능 금액 self.total_profit_loss_money = 0 #총평가손익금액 self.total_profit_loss_rate = 0.0", "종목은 %s \" % rows) # for item in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if", "self.jango[\"보유금액\"]=0 def update(self): #손익률 if self.jango[\"체결가\"] != 0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액 self.jango[\"보유금액\"]=self.jango[\"체결가\"]*self.jango[\"체결량\"]", "= QApplication(sys.argv) # self.kiwoom = Kiwoom() # ret = self.kiwoom.multi_test() # # self.app.exec_()", "self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\",", "= None ####### 요청 스크린 번호 self.screen_my_info = \"2000\" #계좌 관련한 스크린 번호", "#파일에 있는 내용들이 모두 읽어와 진다. 
# for line in lines: #줄바꿈된 내용들이", "tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\":", "sRealType == \"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS b", "self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext): # print(\"sRQName\", sRQName) if sRQName", "value == \"2\": logging.debug(\"장 종료, 동시호가로 넘어감\") elif value == \"4\": logging.debug(\"3시30분 장", "tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update() logging.debug(\"sPreNext : %s\" % sPrevNext) print(\"\\n계좌에", "함수들 바로 실행 self.get_ocx_instance() #OCX 방식을 파이썬에 사용할 수 있게 변환해 주는 함수", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매매가능수량\") logging.debug(\"종목코드: %s - 종목명: %s", "# 출력: 15, default: 0 not_chegual_quan = int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분']) #", "\"현재가\").strip() # 출력 : 000070 volume = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "QString, int, QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self, sScrNo, sRQName, sTrCode,", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 : +(-)2515 f = abs(int(f)) g", "주식체결 정보 : \", sCode,a, b) def send_order(self,order_type, sCode, order_quantity, order_price, hoga_type, order_num=\"\"):", "= int(ls[2].split(\"\\n\")[0]) # stock_price = abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, 
\"현재가\":stock_price}}) # f.close() files =", "종목들 for code in self.portfolio_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) # 스크린번호", "\"주식분봉차트\") self.calculator_event_loop.exec_() return self.data def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호 연결 끊기", "order_type ==\"매수정정\": type_dict = 5 elif order_type ==\"매도정정\": type_dict = 6 if hoga_type", "루프를 종료한다. self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를 준다. account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\")", "000070 start_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"시가\").strip() # 출력", "\"종목코드\") code = code.strip() code_name = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0,", "2110 default : '' if chegual_price == '': chegual_price = 0 else: chegual_price", "in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif sRQName", "뜻이다. # lines = f.readlines() #파일에 있는 내용들이 모두 읽어와 진다. 
# for", "0, \"종목명\") code_name = code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "출력 : +(-)2520 c = abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) #", "def chejan_slot(self, sGubun, nItemCnt, sFidList): if int(sGubun) == 0: #주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\",", "float(d) e = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 : +(-)2520 e =", "sCode, sRealType, sRealData): if sRealType == \"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분),", "매수일때, -2034 매도일 때 g = abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량'])", "종목이 있나 확인 pass else: self.account_stock_dict[code] = Jango(code) code_nm = code_nm.strip() stock_quantity =", "self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\":", "for code in self.not_concluded_account.keys(): code = self.not_concluded_account[code]['종목코드'] if code not in screen_overwrite: screen_overwrite.append(code)", "k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") order_no = self.dynamicCall(\"GetCommData(QString, QString,", "buy_price = self.dynamicCall(\"GetChejanData(int)\", 
self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price)) total_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에", "os import pickle import sys from PyQt5.QAxContainer import * from PyQt5.QtCore import *", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력 : 3 order_quan = int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\",", "== \"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"예수금\") self.deposit", ": +(-)2515 f = abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력", "self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret", "market_code) code_list = code_list.split(';')[:-1] return code_list def read_code(self): # if os.path.exists(\"files/condition_stock.txt\"): # 해당", "code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit() elif sRealType == \"주식체결\":", "스크린번호 ######################################## ######### 초기 셋팅 함수들 바로 실행 self.get_ocx_instance() #OCX 방식을 파이썬에 사용할", "def setRealReg(self, companys): for code in companys: screen_num = self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간']", "buy_price = abs(int(buy_price)) total_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는 종목의 총매입가 total_buy_price", "int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price)) total_buy_price = 
self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) #", "order_num] ) if order_success == 0: logging.debug(\"%s 전달 성공\"%order_type) print(\"%s 전달 성공\"%order_type) else:", "elif value == \"4\": logging.debug(\"3시30분 장 종료\") for code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\",", "self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. #", "\"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def detail_account_mystock(self, sPrevNext=\"0\"):", "QString)\", sTrCode, sRQName, i, \"종목명\") # 출럭 : 한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString, QString,", "종목의 총매입가 total_buy_price = int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price", "= Jango(code) code_nm = code_nm.strip() stock_quantity = int(stock_quantity.strip()) buy_price = int(buy_price.strip()) learn_rate =", "account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호 반환 account_num = account_list.split(';')[1] self.account_num = account_num", "sRQName, sTrCode, sRecordName, sPrevNext): # print(\"sRQName\", sRQName) if sRQName == \"예수금상세현황요청\": deposit =", "sTrCode, sRQName, i, \"종목번호\") # 출력 : A039423 // 알파벳 A는 장내주식, J는", "sCode in self.account_stock_dict.keys(): try: # 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and 
self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count", "QString, int, QString)\", sTrCode, sRQName, i, \"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan})", "‘시가’, ‘고가’, ‘저가’, ‘’]. […]] logging.debug(\"3분봉조회 %s\" % cnt) ret_data=list() for i in", "sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.account_stock_dict = dict()", "######################################## ######### 초기 셋팅 함수들 바로 실행 self.get_ocx_instance() #OCX 방식을 파이썬에 사용할 수", "QString, QString)\", [order_type, self.screen_meme_stock, self.account_num, type_dict, sCode, order_quantity, order_price, hoga_dict, order_num] ) if", "buy_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입가\") # 매입가 :", "할당할 스크린 번호 self.screen_meme_stock = \"6000\" #종목별 할당할 주문용스크린 번호 self.screen_start_stop_real = \"1000\"", "self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = abs(int(first_buy_price)) if sCode", "sTrCode, sRQName, i, \"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "int, QString)\", sTrCode, sRQName, 0, \"예수금\") self.deposit = int(deposit) use_money = float(self.deposit) *", "self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\")", "sTrCode, 
sRQName, i, \"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "+매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"미체결수량\") ok_quantity =", "self.output_deposit) print(\"예수금 : %s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName == \"계좌평가잔고내역요청\": total_buy_money", "sCode, order_quantity, order_price, hoga_dict, order_num] ) if order_success == 0: logging.debug(\"%s 전달 성공\"%order_type)", "code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString,", "self.jango[\"손익률\"]=0. # self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self): #손익률 if self.jango[\"체결가\"] != 0: self.jango[\"손익률\"] =", "import os import pickle import sys from PyQt5.QAxContainer import * from PyQt5.QtCore import", "hoga_dict = \"03\" order_success = self.dynamicCall( \"SendOrder(QString, QString, QString, int, QString, int, int,", "# print(\"잔고\") # print(self.jango_dict) if stock_quan == 0: del self.jango_dict[sCode] #송수신 메세지 get", "== 0: del self.jango_dict[sCode] #송수신 메세지 get def msg_slot(self, sScrNo, sRQName, sTrCode, msg):", "self.jango_dict = {} ######################## ########################################## self.data = None ####### 요청 스크린 번호 self.screen_my_info", "self.not_concluded_account[code] = Jango(code) tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태':", "위한 변수모음 self.login_event_loop = QEventLoop() #로그인 요청용 이벤트루프 self.detail_account_info_event_loop = QEventLoop() # 예수금", "count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count 
= self.account_stock_dict[sCode].jango[\"체결량\"]", "출력: 접수, 확인, 체결 order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력 : 3 order_quan", "self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif sRQName == \"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName)", "logging.debug(\"장 종료, 동시호가로 넘어감\") elif value == \"4\": logging.debug(\"3시30분 장 종료\") for code", "self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는 종목의 총매입가 total_buy_price = int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분'])", "QString)\", sTrCode, sRQName, i, \"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "파일 내용을 읽어 오겠다는 뜻이다. # lines = f.readlines() #파일에 있는 내용들이 모두", "tmp_not_c.jango.update({\"거래량\": g}) # 현재 가지고 있는 대상인지 파악 if sCode in self.account_stock_dict.keys(): try:", "% self.not_concluded_account[code]) print(\"미체결 종목 : %s \" % self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() ####################################### elif sRQName", "에 해당 종목이 있나 확인 pass else: self.account_stock_dict[code] = Jango(code) code_nm = code_nm.strip()", "a = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS b = self.dynamicCall(\"GetCommRealData(QString, int)\",", "시그널 포함 self.get_account_info() #계좌번호 가져오기 self.detail_account_info() #예수금 요청 시그널 포함 self.detail_account_mystock() #계좌평가잔고내역 요청", "QString, int, QString)\", sTrCode, sRQName, i, \"시가\").strip() # 출력 : 000070 high_price =", "######################################### # QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간 수신 관련 함수 #장시작 종료 세팅", "self.data = ret_data self.calculator_event_loop.exit() def multi_rq3(self, sCode, tick): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
trCode", "low_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"저가\").strip() # 출력 :", "sTrCode, msg): logging.debug(\"스크린: %s, 요청이름: %s, tr코드: %s --- %s\" %(sScrNo, sRQName, sTrCode,", "+(-)12.98 d = float(d) e = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 :", "while count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -=", "= int(use_money) self.use_money = self.use_money / 4 output_deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "PyQt5.QtTest import * from config.kiwoomType import * # from config.slack import * import", "한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"보유수량\") # 보유수량", "print(self.account_stock_dict[item].jango) if sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif sRQName == \"실시간미체결요청\": rows", "0 for code in screen_overwrite: temp_screen = int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock) if (cnt", "def realdata_slot(self, sCode, sRealType, sRealData): if sRealType == \"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분'] #", "pass except Exception as e: print(\"실시간 주식체결 정보 : \", sCode,a, b) def", "# tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update() logging.debug(\"sPreNext : %s\" % sPrevNext)", "= \"opt10080\" sRQName = \"3분봉조회\" 수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString,", "c = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력 : +(-)2520 c = 
abs(int(c))", "요청 시그널 self.login_event_loop.exec_() # 이벤트루프 실행 def login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면", "%s, tr코드: %s --- %s\" %(sScrNo, sRQName, sTrCode, msg)) # ui = Ui_class()", "처리가 완료됐으면 이벤트 루프를 종료한다. self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를 준다. account_list", "\" % self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() ####################################### elif sRQName == \"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\",", "self.realType.REALTYPE['주문체결']['체결량']) # 출력: 5 default : '' if chegual_quantity == '': chegual_quantity =", "계좌에 있는 종목의 총매입가 total_buy_price = int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun =", "정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결 정보 : \", self.not_concluded_account[sCode][\"종목명\"],a, b) pass except Exception as", ": '' if chegual_quantity == '': chegual_quantity = 0 else: chegual_quantity = int(chegual_quantity)", "# 현재가 : 000000003450 total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "실행 ######################################### # QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간 수신 관련 함수 #장시작 종료", "9:장마감) value = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid) if value == '0': logging.debug(\"장 시작", "\"2\": logging.debug(\"장 종료, 동시호가로 넘어감\") elif value == \"4\": logging.debug(\"3시30분 장 종료\") for", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110 default : '' if chegual_price == '':", "self.realType.REALTYPE['주문체결']['주문번호']) # 출럭: 0115061 마지막 주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수,", "first_buy_price = abs(int(first_buy_price)) ######## 새로 들어온 주문이면 주문번호 할당 if 
sCode not in", "QString)\", sTrCode, sRQName, 0, \"종목코드\") code = code.strip() code_name = self.dynamicCall(\"GetCommData(QString, QString, int,", "i, \"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매매가능수량\") logging.debug(\"종목코드:", "screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들 for code in self.portfolio_stock_dict.keys(): if code not in screen_overwrite:", ">0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래대금\") # 출력 : 000070 date", "hoga_dict = \"00\" elif hoga_type ==\"시장가\": hoga_dict = \"03\" order_success = self.dynamicCall( \"SendOrder(QString,", "== \"2\": logging.debug(\"장 종료, 동시호가로 넘어감\") elif value == \"4\": logging.debug(\"3시30분 장 종료\")", "int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를", "tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\":", "order_quantity = int(order_quantity.strip()) order_price = int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip()) ok_quantity", "출력 : 
+(-)2520 b = abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) #", "tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity})", "% rows) # for item in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if sPrevNext == \"2\":", "0, \"총매입금액\") self.total_buy_money = int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "if sCode not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g})", "self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를 준다. account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호", ": %s\" % self.output_deposit) print(\"예수금 : %s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName", "* STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE = 0.03 # class Ui_class(): # def __init__(self):", "try: #print(\"실시간 주식체결 정보 : \", self.not_concluded_account[sCode][\"종목명\"],a, b) pass except Exception as e:", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip()", ": %s \" % self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() ####################################### elif sRQName == \"3분봉조회\": cnt =", "QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간 수신 
관련 함수 #장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString,", "except Exception as e: print(e) print(\"EXception 현재 가지고 있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango) try:", "0.5 #예수금에서 실제 사용할 비율 self.output_deposit = 0 #출력가능 금액 self.total_profit_loss_money = 0", "사용할 금액 self.use_money_percent = 0.5 #예수금에서 실제 사용할 비율 self.output_deposit = 0 #출력가능", "pass else: self.not_concluded_account[code] = Jango(code) tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호':", "위한 시그널 / 슬롯 모음 self.real_event_slot() # 실시간 이벤트 시그널 / 슬롯 연결", "current_price = int(current_price.strip()) total_chegual_price = int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip()) tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\":", "i, \"주문구분\") # -매도, +매수, -매도정정, +매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "-6000 first_buy_price = abs(int(first_buy_price)) ######## 새로 들어온 주문이면 주문번호 할당 if sCode not", "# from config.slack import * import logging from PyQt5.QtWidgets import * STOP_LOSS_RATE =", "진다. # for line in lines: #줄바꿈된 내용들이 한줄 씩 읽어와진다. 
# if", "if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k)", "order_quan = int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000 order_price = int(order_price)", "fid) if value == '0': logging.debug(\"장 시작 전\") elif value == '3': logging.debug(\"장", "실행하기 위한 변수모음 self.login_event_loop = QEventLoop() #로그인 요청용 이벤트루프 self.detail_account_info_event_loop = QEventLoop() #", "로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청 관련 이벤트 self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot)", "정보 : \", self.not_concluded_account[sCode][\"종목명\"],a, b) pass except Exception as e: print(\"실시간 주식체결 정보", "실시간 스크린번호 ######################################## ######### 초기 셋팅 함수들 바로 실행 self.get_ocx_instance() #OCX 방식을 파이썬에", "QString, QString, QString)\", screen_num, code, fids, \"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된", "int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money =", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000 order_price = int(order_price) not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량'])", "-매도, +매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028' chegual_price", "i, \"종목번호\") # 출력 : A039423 // 알파벳 A는 장내주식, J는 ELW종목, Q는", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, 
\"시가\").strip() # 출력 : 000070", "‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’, ‘’]. […]] logging.debug(\"3분봉조회 %s\" % cnt)", "sCode, tick): QTest.qWait(3600) #3.6초마다 딜레이를 준다. trCode = \"opt10080\" sRQName = \"3분봉조회\" 수정주가구분", "hoga_type ==\"지정가\": hoga_dict = \"00\" elif hoga_type ==\"시장가\": hoga_dict = \"03\" order_success =", "i, \"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") order_no", "1 # 실시간 데이터 얻어오기 def realdata_slot(self, sCode, sRealType, sRealData): if sRealType ==", "있는 내용들이 모두 읽어와 진다. # for line in lines: #줄바꿈된 내용들이 한줄", "==\"지정가\": hoga_dict = \"00\" elif hoga_type ==\"시장가\": hoga_dict = \"03\" order_success = self.dynamicCall(", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총매입금액\") self.total_buy_money = int(total_buy_money) total_profit_loss_money =", "\"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 : %s - %s - %s\" %", "기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: #", "sRQName, i, \"시가\").strip() # 출력 : 000070 high_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "QString)\", [order_type, self.screen_meme_stock, self.account_num, type_dict, sCode, order_quantity, order_price, hoga_dict, order_num] ) if order_success", "ret_data self.calculator_event_loop.exit() def multi_rq3(self, sCode, tick): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
trCode = \"opt10080\"", "sRQName, i, \"주문구분\") # -매도, +매수, -매도정정, +매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString, QString, int,", "self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self, companys): for code in companys: screen_num = self.not_concluded_account[code]['스크린번호'] fids", "learn_rate}) tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update() logging.debug(\"sPreNext :", "{} self.jango_dict = {} ######################## ########################################## self.data = None ####### 요청 스크린 번호", "####################################### elif sRQName == \"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) # print(sTrCode)", "self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code)", "current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000 current_price = abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\",", "companys: screen_num = self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", screen_num, code,", "self.jango[\"체결가\"] != 0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액 self.jango[\"보유금액\"]=self.jango[\"체결가\"]*self.jango[\"체결량\"] #내용 확인해 보자. 
기존 주식과", "chegual_price = 0 else: chegual_price = int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 출력:", "self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g}) # 현재 가지고 있는", "hoga_type ==\"시장가\": hoga_dict = \"03\" order_success = self.dynamicCall( \"SendOrder(QString, QString, QString, int, QString,", "= \"3분봉조회\" 수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick)", "self.real_event_slot() # 실시간 이벤트 시그널 / 슬롯 연결 self.signal_login_commConnect() #로그인 요청 시그널 포함", "in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan})", "def trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext): # print(\"sRQName\", sRQName) if sRQName ==", "float(learn_rate.strip()) current_price = int(current_price.strip()) total_chegual_price = int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip()) tmp = self.account_stock_dict[code]", "return code_list def read_code(self): # if os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에 파일이 있는지 체크한다.", "성공\"%order_type) print(\"%s 전달 성공\"%order_type) else: logging.debug(\"%s 전달 실패\"%order_type) return order_success # 실시간 체결", "self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호 반환 account_num = account_list.split(';')[1] self.account_num = account_num logging.debug(\"계좌번호 :", "QString)\", sTrCode, sRQName, 0, \"예수금\") self.deposit = int(deposit) use_money = float(self.deposit) * self.use_money_percent", "code.strip() 
code_name = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목명\") code_name =", "sTrCode, sRQName, i, \"보유수량\") # 보유수량 : 000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString, QString, int,", "QString)\", sTrCode, sRQName, i, \"체결량\") code = code.strip() code_nm = code_nm.strip() order_no =", "* from config.errorCode import * from PyQt5.QtTest import * from config.kiwoomType import *", "else: logging.debug(\"%s 전달 실패\"%order_type) return order_success # 실시간 체결 정보 def chejan_slot(self, sGubun,", "if sCode not in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number})", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"보유수량\") # 보유수량 : 000000000000010 buy_price", "self.detail_account_info_event_loop.exit() elif sRQName == \"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "i, \"체결량\") code = code.strip() code_nm = code_nm.strip() order_no = int(order_no.strip()) order_status =", "= Jango(code) tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status})", "code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") # 출럭 :", "== '': chegual_quantity = 0 else: chegual_quantity = int(chegual_quantity) current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가'])", "self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" 
self.jango[\"주문용스크린번호\"]=\"\"", "self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) # f.close() files = os.listdir(\"./models/\") codes=list() for f in files: codes.append(f.replace(\".pt\",\"\"))", "\"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext,", "000070 date = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"일자\") # 출력", "sCode, fid) if value == '0': logging.debug(\"장 시작 전\") elif value == '3':", "self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\":", "tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update() logging.debug(\"sPreNext : %s\"", "self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 : defaluse : \"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 출럭:", "int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 출력: 5 default : '' if chegual_quantity", "def get_code_list_by_market(self, market_code): ''' 종목코드 리스트 받기 #0:장내, 10:코스닥 :param market_code: 시장코드 입력", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_()", "-매도정정, +매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, 
\"미체결수량\") ok_quantity", "\"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"):", "int, QString)\", sTrCode, sRQName, i, \"거래량\").strip() # 출력 : 000070 trading_value = self.dynamicCall(\"GetCommData(QString,", "self.screen_real_stock = \"5000\" #종목별 할당할 스크린 번호 self.screen_meme_stock = \"6000\" #종목별 할당할 주문용스크린", "sTrCode, sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "sRQName, sTrCode, msg): logging.debug(\"스크린: %s, 요청이름: %s, tr코드: %s --- %s\" %(sScrNo, sRQName,", "def screen_number_setting(self): screen_overwrite = [] #계좌평가잔고내역에 있는 종목들 for code in self.account_stock_dict.keys(): if", "0: meme_screen += 1 self.screen_meme_stock = str(meme_screen) if code in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)})", "self.realType.REALTYPE['주문체결']['주문구분']) # 출력: -매도, +매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) #", "포함 self.get_account_info() #계좌번호 가져오기 self.detail_account_info() #예수금 요청 시그널 포함 self.detail_account_mystock() #계좌평가잔고내역 요청 시그널", "현재가: %s\" % ( code, code_nm, stock_quantity, buy_price, learn_rate, current_price)) if code in", "/ 슬롯 연결 self.signal_login_commConnect() #로그인 요청 시그널 포함 self.get_account_info() #계좌번호 가져오기 self.detail_account_info() #예수금", "self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName == \"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", ": 000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", 
sTrCode, sRQName, i, \"수익률(%)\") #", "self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 : +(-)2515 f = abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "A는 장내주식, J는 ELW종목, Q는 ETN종목 code = code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString, QString,", "(total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i in range(rows):", "== '0': logging.debug(\"장 시작 전\") elif value == '3': logging.debug(\"장 시작\") elif value", "config.kiwoomType import * # from config.slack import * import logging from PyQt5.QtWidgets import", "real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) # 실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한 이벤트 def signal_login_commConnect(self):", "line in lines: #줄바꿈된 내용들이 한줄 씩 읽어와진다. # if line != \"\":", "# 실시간 체결 정보 def chejan_slot(self, sGubun, nItemCnt, sFidList): if int(sGubun) == 0:", "self.event_slots() # 키움과 연결하기 위한 시그널 / 슬롯 모음 self.real_event_slot() # 실시간 이벤트", "#로그인 요청용 이벤트루프 self.detail_account_info_event_loop = QEventLoop() # 예수금 요청용 이벤트루프 self.calculator_event_loop = QEventLoop()", "order_status.strip() order_quantity = int(order_quantity.strip()) order_price = int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip())", "like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\")", "= 0 #실제 투자에 사용할 금액 self.use_money_percent = 0.5 #예수금에서 실제 사용할 비율", "% self.output_deposit) print(\"예수금 : %s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName == 
\"계좌평가잔고내역요청\":", "import logging from PyQt5.QtWidgets import * STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE = 0.03 #", "count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as e: print(e) print(\"EXception 현재 가지고 있는", "screen_overwrite: screen_overwrite.append(code) # 스크린번호 할당 cnt = 0 for code in screen_overwrite: temp_screen", "int, QString)\", sTrCode, sRQName, i, \"일자\") # 출력 : 000070 start_price = self.dynamicCall(\"GetCommData(QString,", "self.realType.REALTYPE[sRealType]['현재가']) # 출력 : +(-)2520 b = abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "= abs(int(first_buy_price)) ######## 새로 들어온 주문이면 주문번호 할당 if sCode not in self.not_concluded_account.keys():", "= ls[1] # stock_price = int(ls[2].split(\"\\n\")[0]) # stock_price = abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}})", "self.signal_login_commConnect() #로그인 요청 시그널 포함 self.get_account_info() #계좌번호 가져오기 self.detail_account_info() #예수금 요청 시그널 포함", "ls = line.split(\"\\t\") # stock_code = ls[0] # stock_name = ls[1] # stock_price", "= int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000 order_price = int(order_price) not_chegual_quan", "스크린번호 연결 끊기 def get_code_list_by_market(self, market_code): ''' 종목코드 리스트 받기 #0:장내, 10:코스닥 :param", "0 #예수금 self.use_money = 0 #실제 투자에 사용할 금액 self.use_money_percent = 0.5 #예수금에서", "logging.debug(\"장 시작 전\") elif value == '3': logging.debug(\"장 시작\") elif value == \"2\":", "QString, int, QString)\", sTrCode, sRQName, 0, \"종목코드\") code = code.strip() code_name = self.dynamicCall(\"GetCommData(QString,", "b}) tmp_not_c.jango.update({\"거래량\": g}) # 현재 가지고 있는 대상인지 파악 if sCode in self.account_stock_dict.keys():", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\", 
self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan) buy_price", "\"6000\" #종목별 할당할 주문용스크린 번호 self.screen_start_stop_real = \"1000\" #장 시작/종료 실시간 스크린번호 ########################################", "print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as e:", "int(high_price), int(low_price)] ret_data.append(data) self.data = ret_data self.calculator_event_loop.exit() def multi_rq3(self, sCode, tick): QTest.qWait(3600) #3.6초마다", "logging.debug(\"3분봉조회 %s\" % cnt) ret_data=list() for i in range(cnt): data = [] code", "sTrCode, msg)) # ui = Ui_class() class Jango(): def __init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code", "self.account_stock_dict[code] = Jango(code) code_nm = code_nm.strip() stock_quantity = int(stock_quantity.strip()) buy_price = int(buy_price.strip()) learn_rate", "QString)\", \"수정주가구분\", 수정주가구분) ret = self.dynamicCall(\"CommRqData(QString, QString, int, QString, QString, QString)\",sRQName,trCode, \"0\", self.screen_meme_stock)", "= abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량'])", "in range(cnt): data = [] code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’. 
‘’], [‘’, ‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’,", "total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\") # print(self.jango_dict) if stock_quan", "chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 출력: 5 default : '' if chegual_quantity ==", "int, QString)\", sTrCode, sRQName, i, \"종목번호\") # 출력 : A039423 // 알파벳 A는", "= dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\",", "meme_screen += 1 self.screen_meme_stock = str(meme_screen) if code in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\":", "elif sRQName == \"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) # print(sTrCode) #", "== \"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif sRQName == \"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\",", "str(self.screen_meme_stock)}) cnt += 1 # 실시간 데이터 얻어오기 def realdata_slot(self, sCode, sRealType, sRealData):", "tick): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
trCode = \"opt10080\" sRQName = \"3분봉조회\" 수정주가구분 =", "chegual_quantity == '': chegual_quantity = 0 else: chegual_quantity = int(chegual_quantity) current_price = self.dynamicCall(\"GetChejanData(int)\",", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price)) total_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는", "self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif sRQName ==", "code_name = code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\").strip()", "tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\":", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력 : +(-)12.98 d = float(d) e =", "\"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.account_stock_dict", "= {} self.deposit = 0 #예수금 self.use_money = 0 #실제 투자에 사용할 금액", "order_price, hoga_type, order_num=\"\"): if order_type == \"신규매수\": type_dict = 1 elif order_type ==\"신규매도\":", "sRQName, 0, \"총매입금액\") self.total_buy_money = int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력 : +(-)2520 c = abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString, int)\",", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString,", "self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간 수신 관련 함수 #장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString,", "signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") # 로그인 요청 시그널 self.login_event_loop.exec_() # 이벤트루프 실행 def login_slot(self, err_code):", "# tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update()", "self.not_concluded_account: pass else: self.not_concluded_account[code] = Jango(code) tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명': code_nm})", "#3.6초마다 딜레이를 준다. 
trCode = \"opt10080\" sRQName = \"3분봉조회\" 수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString,", "sCode, self.realType.REALTYPE[sRealType]['현재가']) # 출력 : +(-)2520 b = abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString, int)\",", "방식을 파이썬에 사용할 수 있게 변환해 주는 함수 self.event_slots() # 키움과 연결하기 위한", "int, QString)\", sTrCode, sRQName, 0, \"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 : %s", "+매수, -매도정정, +매수정정 not_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"미체결수량\")", "order_quantity, order_price, hoga_dict, order_num] ) if order_success == 0: logging.debug(\"%s 전달 성공\"%order_type) print(\"%s", "0 #실제 투자에 사용할 금액 self.use_money_percent = 0.5 #예수금에서 실제 사용할 비율 self.output_deposit", "tmp.update() logging.debug(\"sPreNext : %s\" % sPrevNext) print(\"\\n계좌에 가지고 있는 종목은 %s \" %", "for i in range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "i in range(rows): code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목코드\")", "sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS b = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['현재가']) # 출력", "self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self, companys): for code in companys: screen_num =", "stock_price = abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) # f.close() files = os.listdir(\"./models/\") codes=list() for", "line.split(\"\\t\") # stock_code = ls[0] # stock_name = ls[1] # stock_price = int(ls[2].split(\"\\n\")[0])", "def read_code(self): # if os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에 파일이 있는지 체크한다. 
# f", "출력 : +(-)2530 j = abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['저가']) #", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString, QString, int,", "실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한 이벤트 def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") # 로그인", "출력 : 000070 high_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"고가\").strip()", "current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\").strip() # 출력 :", "first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\") # print(self.jango_dict) if stock_quan == 0: del self.jango_dict[sCode]", "for i in range(cnt): data = [] code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "abs(int(buy_price)) total_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는 종목의 총매입가 total_buy_price = int(total_buy_price)", "chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun) == 1: #잔고 account_num =", "\"종목번호\") # 출력 : A039423 // 알파벳 A는 장내주식, J는 ELW종목, Q는 ETN종목", "type_dict = 2 elif order_type ==\"매수취소\": type_dict = 3 elif order_type ==\"매도취소\": type_dict", "sRQName, i, \"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문가격\")", "sTrCode, sRQName, i, \"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": 
chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update() print(\"주문체결\")", "screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들 for code in self.portfolio_stock_dict.keys(): if code not in", "self.realType.REALTYPE[sRealType]['거래량']) # 출력 : +240124 매수일때, -2034 매도일 때 g = abs(int(g)) h", "\"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.dynamicCall(\"SetInputValue(QString, QString)\",", "Slack() #슬랙 동작 #print(\"kiwoom() class start. \") print(\"Kiwoom() class start.\") ####### event loop를", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS b = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "encoding=\"utf8\") # \"r\"을 인자로 던져주면 파일 내용을 읽어 오겠다는 뜻이다. # lines =", "= Jango(code) return codes def screen_number_setting(self): screen_overwrite = [] #계좌평가잔고내역에 있는 종목들 for", "get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호 반환 account_num", "if line != \"\": # ls = line.split(\"\\t\") # stock_code = ls[0] #", "현재가 : 000000003450 total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입금액\")", "codes: self.portfolio_stock_dict[code] = Jango(code) return codes def screen_number_setting(self): screen_overwrite = [] #계좌평가잔고내역에 있는", "current_price = abs(int(current_price)) stock_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\",", "stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun})", "order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\",", "sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan})", "learn_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"수익률(%)\") # 수익률 :", "tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update() logging.debug(\"sPreNext", "%s - 종목명: %s - 보유수량: %s - 매입가:%s - 수익률: %s -", "== '': chegual_price = 0 else: chegual_price = int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", 
self.realType.REALTYPE['주문체결']['체결량'])", "first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000 first_buy_price = abs(int(first_buy_price)) ######## 새로 들어온", "buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\") # print(self.jango_dict)", ": 000070 date = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"일자\") #", "# f = open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") # \"r\"을 인자로 던져주면 파일 내용을 읽어", "QString)\", sTrCode, sRQName, i, \"종목번호\") # 출력 : A039423 // 알파벳 A는 장내주식,", "self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num)", "체크한다. 
# f = open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") # \"r\"을 인자로 던져주면 파일 내용을", "/ 4 output_deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"출금가능금액\") self.output_deposit", "self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\": order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status})", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['현재가']) # 출력 : +(-)2520 b = abs(int(b)) c", "if code in self.account_stock_dict: # dictionary 에 해당 종목이 있나 확인 pass else:", "\"수익률(%)\") # 수익률 : -000000001.94 current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "# 출력: 5 default : '' if chegual_quantity == '': chegual_quantity = 0", "QString)\", screen_num, code, fids, \"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된 api 모듈", "int, QString)\", sTrCode, sRQName, i, \"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "type_dict, sCode, order_quantity, order_price, hoga_dict, order_num] ) if order_success == 0: logging.debug(\"%s 전달", "1 self.screen_meme_stock = str(meme_screen) if code in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif", "ret_data=list() for i in range(cnt): data = [] code = self.dynamicCall(\"GetCommData(QString, QString, int,", "전달 실패\"%order_type) return order_success # 실시간 체결 정보 def chejan_slot(self, sGubun, nItemCnt, sFidList):", "= abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = 
abs(int(first_buy_price)) if sCode not in", "def msg_slot(self, sScrNo, sRQName, sTrCode, msg): logging.debug(\"스크린: %s, 요청이름: %s, tr코드: %s ---", "\"종목명\") # 출럭 : 한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "logging.debug(\"스크린: %s, 요청이름: %s, tr코드: %s --- %s\" %(sScrNo, sRQName, sTrCode, msg)) #", "#3.6초마다 딜레이를 준다. self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\",", "딜레이를 준다. self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\")", "QString)\", sTrCode, sRQName, i, \"종목명\") order_no = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "int(buy_price.strip()) learn_rate = float(learn_rate.strip()) current_price = int(current_price.strip()) total_chegual_price = int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip())", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString, QString, int,", "code_nm = code_nm.strip() order_no = int(order_no.strip()) order_status = order_status.strip() order_quantity = int(order_quantity.strip()) order_price", ": \", sCode,a, b) def send_order(self,order_type, sCode, order_quantity, order_price, hoga_type, order_num=\"\"): if order_type", "self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price", ": \"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 
출럭: 0115061 마지막 주문번호 order_status =", "sRQName = \"3분봉조회\" 수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\",", "\") print(\"Kiwoom() class start.\") ####### event loop를 실행하기 위한 변수모음 self.login_event_loop = QEventLoop()", "완료됐으면 이벤트 루프를 종료한다. self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를 준다. account_list =", "tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결 종목 : %s \" %", "i, \"현재가\") # 현재가 : 000000003450 total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "QTest.qWait(3600) #3.6초마다 딜레이를 준다. account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") # 계좌번호 반환 account_num =", "order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip()) ok_quantity = int(ok_quantity.strip()) if code in self.not_concluded_account: pass else:", "QTest.qWait(5000) sys.exit() elif sRealType == \"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) #", "# 출력: '151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110 default : ''", "\"체결량\") code = code.strip() code_nm = code_nm.strip() order_no = int(order_no.strip()) order_status = order_status.strip()", "= int(ok_quantity.strip()) if code in self.not_concluded_account: pass else: self.not_concluded_account[code] = Jango(code) tmp =", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가'])", "chegual_quantity = 0 else: chegual_quantity = int(chegual_quantity) 
current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력:", "PyQt5.QtCore import * from config.errorCode import * from PyQt5.QtTest import * from config.kiwoomType", "Ui_class() class Jango(): def __init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0", "# 수익률 : -000000001.94 current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "QString)\", sTrCode, sRQName, i, \"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "int, QString)\", sTrCode, sRQName, 0, \"총매입금액\") self.total_buy_money = int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString,", "in self.portfolio_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) # 스크린번호 할당 cnt =", "실시간 데이터 얻어오기 def realdata_slot(self, sCode, sRealType, sRealData): if sRealType == \"장시작시간\": fid", "A039423 // 알파벳 A는 장내주식, J는 ELW종목, Q는 ETN종목 code = code.strip()[1:] code_nm", "구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑로스", "대상인지 파악 if sCode in self.account_stock_dict.keys(): try: # 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0", "실행 self.get_ocx_instance() #OCX 방식을 파이썬에 사용할 수 있게 변환해 주는 함수 self.event_slots() #", "[order_type, self.screen_meme_stock, self.account_num, type_dict, sCode, order_quantity, order_price, hoga_dict, order_num] ) if order_success ==", "QString, int, 
QString)\", sTrCode, sRQName, 0, \"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 :", "buy_price}) tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price}) # tmp.jango.update({'매매가능수량'", "abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력 : +240124 매수일때, -2034", "= self.use_money / 4 output_deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0,", "tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결 종목", "int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price", "\"총매입금액\") self.total_buy_money = int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0,", "self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\":", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력 : 3 order_quan = int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격'])", "i = abs(int(i)) j = self.dynamicCall(\"GetCommRealData(QString, int)\", 
sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력 : +(-)2530", "포함 self.detail_account_mystock() #계좌평가잔고내역 요청 시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에 미체결 종목들 가져오기", "value = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid) if value == '0': logging.debug(\"장 시작 전\")", "self.not_concluded_account[code]['종목코드'] if code not in screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들 for code in", "int)\", sCode, self.realType.REALTYPE[sRealType]['저가']) # 출력 : +(-)2530 k = abs(int(k)) if sCode not", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력 : +(-)2530 i = abs(int(i)) j =", "meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\") # print(self.jango_dict) if stock_quan == 0:", "# ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\") self.calculator_event_loop.exec_() return self.data def stop_screen_cancel(self, sScrNo=None):", "elif order_type ==\"매수취소\": type_dict = 3 elif order_type ==\"매도취소\": type_dict = 4 elif", "'': chegual_price = 0 else: chegual_price = int(chegual_price) chegual_quantity = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) #", "= self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i in range(rows): code = self.dynamicCall(\"GetCommData(QString, QString,", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000 first_buy_price = abs(int(first_buy_price)) ######## 새로 들어온 주문이면", "#OCX 방식을 파이썬에 사용할 수 있게 변환해 주는 함수 self.event_slots() # 키움과 연결하기", "sTrCode, sRQName, 0, \"출금가능금액\") self.output_deposit = int(output_deposit) logging.debug(\"예수금 : %s\" % self.output_deposit) print(\"예수금", "learn_rate, current_price)) if code in self.account_stock_dict: # dictionary 에 해당 종목이 있나 확인", 
"self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) #", "int(possible_quantity.strip()) tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\":", "tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun)", "login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면 이벤트 루프를 종료한다. self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600)", "QString, int, QString)\", sTrCode, sRQName, i, \"저가\").strip() # 출력 : 000070 data=[int(current_price),int(volume), int(start_price),", "j = abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['저가']) # 출력 : +(-)2530", "self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\"", "tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price}) # tmp.jango.update({\"매입금액\": total_chegual_price}) #", "QString)\", trCode, \"주식분봉차트\") self.calculator_event_loop.exec_() return self.data def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호", "4 output_deposit = 
self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"출금가능금액\") self.output_deposit =", "current_price}) tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun) == 1: #잔고 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호'])", "0, \"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "+(-)2520 b = abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력 :", "= account_num logging.debug(\"계좌번호 : %s\" % account_num) def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를", "b) def send_order(self,order_type, sCode, order_quantity, order_price, hoga_type, order_num=\"\"): if order_type == \"신규매수\": type_dict", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price)) stock_quan", "self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스", "i, \"종목명\") order_no = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문번호\") order_status", "시작\") elif value == \"2\": logging.debug(\"장 종료, 동시호가로 넘어감\") elif value == \"4\":", "self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. 
# self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def", "self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010 first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력:", "f in files: codes.append(f.replace(\".pt\",\"\")) for code in codes: self.portfolio_stock_dict[code] = Jango(code) return codes", "‘날짜’, ‘시가’, ‘고가’, ‘저가’. ‘’], [‘’, ‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’,", "있게 변환해 주는 함수 self.event_slots() # 키움과 연결하기 위한 시그널 / 슬롯 모음", "self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. # self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self): #손익률 if", "sGubun, nItemCnt, sFidList): if int(sGubun) == 0: #주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode", "logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget): def __init__(self): super().__init__() self.realType = RealType() # self.slack =", "sScrNo) # 스크린번호 연결 끊기 def get_code_list_by_market(self, market_code): ''' 종목코드 리스트 받기 #0:장내,", "시그널 포함 self.detail_account_mystock() #계좌평가잔고내역 요청 시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에 미체결 종목들", "= self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", screen_num, code, fids, \"1\")", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString, QString,", "= abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) # f.close() files = os.listdir(\"./models/\") codes=list() for f", "# \"r\"을 인자로 던져주면 파일 내용을 읽어 오겠다는 뜻이다. 
# lines = f.readlines()", "# stock_code = ls[0] # stock_name = ls[1] # stock_price = int(ls[2].split(\"\\n\")[0]) #", "else: self.not_concluded_account[code] = Jango(code) tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no})", "in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit() elif sRealType == \"주식체결\": a", "code = code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\")", "Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt += 1 # 실시간 데이터 얻어오기 def", "= abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력 : +240124 매수일때,", "= code_nm.strip() stock_quantity = int(stock_quantity.strip()) buy_price = int(buy_price.strip()) learn_rate = float(learn_rate.strip()) current_price =", "account_num) def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString,", "tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif int(sGubun) == 1: #잔고 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode", "elif value == \"2\": logging.debug(\"장 종료, 동시호가로 넘어감\") elif value == \"4\": logging.debug(\"3시30분", "trCode, \"주식분봉차트\") self.calculator_event_loop.exec_() return self.data def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호 연결", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목번호\") # 출력 : A039423", "int, QString)\", sTrCode, sRQName, i, \"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "시그널 / 슬롯 연결 self.signal_login_commConnect() #로그인 요청 시그널 포함 self.get_account_info() #계좌번호 가져오기 self.detail_account_info()", "# 출력 : 000070 data=[int(current_price),int(volume), int(start_price), int(high_price), int(low_price)] ret_data.append(data) self.data = ret_data self.calculator_event_loop.exit()", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip() origin_order_number = self.dynamicCall(\"GetChejanData(int)\",", "있는 종목은 %s \" % rows) # for item in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango)", "# 출력 : +(-)2530 k = abs(int(k)) if sCode not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode)", "출력 : +(-)2530 i = abs(int(i)) j = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['시가']) #", "order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000 order_price = int(order_price) 
not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\",", "self.not_concluded_account.keys(): code = self.not_concluded_account[code]['종목코드'] if code not in screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들", "QString)\", sTrCode, sRQName, i, \"시가\").strip() # 출력 : 000070 high_price = self.dynamicCall(\"GetCommData(QString, QString,", "self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as e: print(e) print(\"EXception 현재 가지고 있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango)", "‘고가’, ‘저가’, ‘’]. […]] logging.debug(\"3분봉조회 %s\" % cnt) ret_data=list() for i in range(cnt):", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip()", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip() origin_order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 : defaluse", "print(\"sRQName\", sRQName) if sRQName == \"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt += 1 # 실시간 데이터 얻어오기 def realdata_slot(self, sCode, sRealType,", "= int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip()) tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity})", "get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\",", "order_type ==\"매도정정\": type_dict = 6 if hoga_type ==\"지정가\": hoga_dict = \"00\" elif hoga_type", "order_status = order_status.strip() order_quantity = int(order_quantity.strip()) order_price = int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity", "def event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) # 트랜잭션 요청 관련 이벤트", "% self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName == \"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString, QString, int,", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 : +(-)2520 e = abs(int(e)) f =", "self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총수익률(%)\")", "= abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000 first_buy_price = abs(int(first_buy_price)) ########", "Exception as e: print(e) print(\"EXception 현재 가지고 있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간", "\"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문가격\") order_gubun =", "== 0: temp_screen += 1 self.screen_real_stock = str(temp_screen) if (cnt % 50) ==", "int, QString)\", sTrCode, sRQName, i, \"종목명\") order_no = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "self.app = QApplication(sys.argv) # self.kiwoom = Kiwoom() # ret = self.kiwoom.multi_test() # #", "self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def 
trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext): # print(\"sRQName\", sRQName) if", "i = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력 : +(-)2530 i = abs(int(i))", "self.realType.REALTYPE['잔고']['종목명']) stock_name = stock_name.strip() current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['현재가']) current_price = abs(int(current_price)) stock_quan =", "sRQName, 0, \"출금가능금액\") self.output_deposit = int(output_deposit) logging.debug(\"예수금 : %s\" % self.output_deposit) print(\"예수금 :", "while count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -=", "QString, int, QString)\", sTrCode, sRQName, i, \"주문상태\") # 접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString, QString,", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000 current_price = abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) #", "번호 self.screen_real_stock = \"5000\" #종목별 할당할 스크린 번호 self.screen_meme_stock = \"6000\" #종목별 할당할", "self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. 
# self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self): #손익률 if self.jango[\"체결가\"]", "QString, int, QString)\", sTrCode, sRQName, i, \"일자\") # 출력 : 000070 start_price =", "codes=list() for f in files: codes.append(f.replace(\".pt\",\"\")) for code in codes: self.portfolio_stock_dict[code] = Jango(code)", "order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\": current_price})", "QString, int, QString)\", sTrCode, sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString,", "high_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"고가\").strip() # 출력 :", "딜레이를 준다. self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\",", "msg_slot(self, sScrNo, sRQName, sTrCode, msg): logging.debug(\"스크린: %s, 요청이름: %s, tr코드: %s --- %s\"", "#출력가능 금액 self.total_profit_loss_money = 0 #총평가손익금액 self.total_profit_loss_rate = 0.0 #총수익률(%) ######################################## ######## 종목", "codes.append(f.replace(\".pt\",\"\")) for code in codes: self.portfolio_stock_dict[code] = Jango(code) return codes def screen_number_setting(self): screen_overwrite", "int)\", sCode, fid) if value == '0': logging.debug(\"장 시작 전\") elif value ==", "int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력 : +(-)2530 i = abs(int(i)) j = self.dynamicCall(\"GetCommRealData(QString,", "self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip() origin_order_number = 
self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) #", "from config.kiwoomType import * # from config.slack import * import logging from PyQt5.QtWidgets", "종료\") for code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit() elif sRealType", "f = abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['거래량']) # 출력 : +240124", "반환 account_num = account_list.split(';')[1] self.account_num = account_num logging.debug(\"계좌번호 : %s\" % account_num) def", "stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\": learn_rate}) tmp.jango.update({\"현재가\": current_price})", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"체결량\") code = code.strip() code_nm", "sys from PyQt5.QAxContainer import * from PyQt5.QtCore import * from config.errorCode import *", "self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName == \"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 출력: 5 default : '' if chegual_quantity == '':", "QString, int, QString)\", sTrCode, sRQName, i, \"종목번호\") # 출력 : A039423 // 알파벳", "elif sRQName == \"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i in", "self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 : %s - %s - %s\" % (total_buy_money,", "QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() 
def", "\"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext,", "모음 self.real_event_slot() # 실시간 이벤트 시그널 / 슬롯 연결 self.signal_login_commConnect() #로그인 요청 시그널", "# 출럭: 0115061 마지막 주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수, 확인,", "tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결 종목 : %s", "os.listdir(\"./models/\") codes=list() for f in files: codes.append(f.replace(\".pt\",\"\")) for code in codes: self.portfolio_stock_dict[code] =", "출력 : 000070 data=[int(current_price),int(volume), int(start_price), int(high_price), int(low_price)] ret_data.append(data) self.data = ret_data self.calculator_event_loop.exit() def", "sRealType, sRealData): if sRealType == \"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분), 3:장시작,", "= code.strip() code_nm = code_nm.strip() order_no = int(order_no.strip()) order_status = order_status.strip() order_quantity =", "stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호 연결 끊기 def get_code_list_by_market(self, market_code): ''' 종목코드", "int(low_price)] ret_data.append(data) self.data = ret_data self.calculator_event_loop.exit() def multi_rq3(self, sCode, tick): QTest.qWait(3600) #3.6초마다 딜레이를", "--- %s\" %(sScrNo, sRQName, sTrCode, msg)) # ui = Ui_class() class Jango(): def", "QString, int, QString)\", sTrCode, sRQName, 0, \"예수금\") self.deposit = int(deposit) use_money = float(self.deposit)", "QString)\", self.not_concluded_account[code]['스크린번호'], code) QTest.qWait(5000) sys.exit() elif sRealType == \"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString, int)\",", "sRQName, i, \"현재가\") # 현재가 : 000000003450 
total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결량']) # 출력: 5 default : '' if chegual_quantity == '': chegual_quantity", "주문용스크린 번호 self.screen_start_stop_real = \"1000\" #장 시작/종료 실시간 스크린번호 ######################################## ######### 초기 셋팅", "abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) # f.close() files = os.listdir(\"./models/\") codes=list() for f in", "code = code.strip() code_name = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목명\")", "in codes: self.portfolio_stock_dict[code] = Jango(code) return codes def screen_number_setting(self): screen_overwrite = [] #계좌평가잔고내역에", "# 출력: -매도, +매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력:", "abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = abs(int(first_buy_price)) if sCode not in self.jango_dict.keys():", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목명\") code_name = code_name.strip() current_price =", "\"\": # ls = line.split(\"\\t\") # stock_code = ls[0] # stock_name = ls[1]", "# stock_name = ls[1] # stock_price = int(ls[2].split(\"\\n\")[0]) # stock_price = abs(stock_price) #", "%s\" % self.output_deposit) print(\"예수금 : %s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName ==", "self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한 이벤트 def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") # 로그인 요청 시그널 self.login_event_loop.exec_()", "self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\") 
self.calculator_event_loop.exec_() return self.data def stop_screen_cancel(self, sScrNo=None): self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) #", "# 출력 : 3 order_quan = int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력:", "% sPrevNext) print(\"\\n계좌에 가지고 있는 종목은 %s \" % rows) # for item", "넘어감\") elif value == \"4\": logging.debug(\"3시30분 장 종료\") for code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString,", "self.kiwoom = Kiwoom() # ret = self.kiwoom.multi_test() # # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class", "QString, int, QString)\", sTrCode, sRQName, 0, \"총매입금액\") self.total_buy_money = int(total_buy_money) total_profit_loss_money = self.dynamicCall(\"GetCommData(QString,", "접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문수량\") order_price =", "# self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self): #손익률 if self.jango[\"체결가\"] != 0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"]", "QString)\", \"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분) ret = self.dynamicCall(\"CommRqData(QString, QString, int, QString,", "- %s\" % (total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for", "# 스크린번호 연결 끊기 def get_code_list_by_market(self, market_code): ''' 종목코드 리스트 받기 #0:장내, 10:코스닥", "self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'],", "가지고 있는 종목은 %s \" % rows) # for item in self.account_stock_dict.keys(): #", "self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) 
tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price})", "= ls[0] # stock_name = ls[1] # stock_price = int(ls[2].split(\"\\n\")[0]) # stock_price =", "not_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString,", "체결 order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력 : 3 order_quan = int(order_quan) order_price", "스크린 번호 self.screen_my_info = \"2000\" #계좌 관련한 스크린 번호 self.screen_calculation_stock = \"4000\" #계산용", "%s --- %s\" %(sScrNo, sRQName, sTrCode, msg)) # ui = Ui_class() class Jango():", "할당 if sCode not in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp = self.not_concluded_account[sCode] tmp.jango.update({\"종목코드\": sCode}) tmp.jango.update({\"주문번호\":", "origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update() print(\"주문체결\") print(self.not_concluded_account[sCode].jango) elif", "#예수금 self.use_money = 0 #실제 투자에 사용할 금액 self.use_money_percent = 0.5 #예수금에서 실제", "self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b}) tmp_not_c.jango.update({\"거래량\": g}) # 현재 가지고 있는 대상인지 파악 if sCode", "= code_list.split(';')[:-1] return code_list def read_code(self): # if os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에 파일이", "= Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt += 1 # 실시간 데이터 얻어오기", "% (total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, 
sRQName) for i in", "000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입가\") # 매입가", "QString, int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다", "= self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\")", "logging.debug(\"미체결 종목 : %s \" % self.not_concluded_account[code]) print(\"미체결 종목 : %s \" %", "self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110 default :", "tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update() logging.debug(\"sPreNext : %s\" % sPrevNext) print(\"\\n계좌에 가지고 있는 종목은", "= {} self.not_concluded_account = {} self.deposit = 0 #예수금 self.use_money = 0 #실제", "total_buy_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총매입금액\") self.total_buy_money = int(total_buy_money)", "000070 low_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"저가\").strip() # 출력", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매매가능수량\") logging.debug(\"종목코드: %s - 종목명:", "QString, int, QString, QString, QString)\",sRQName,trCode, \"0\", self.screen_meme_stock) # ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode,", "%s - %s - %s\" % (total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\",", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"예수금상세현황요청\",", "== \"4\": logging.debug(\"3시30분 장 종료\") for code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'], code)", "매도일 때 g = abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력", "tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량':", "== '3': logging.debug(\"장 시작\") elif value == \"2\": logging.debug(\"장 종료, 동시호가로 넘어감\") elif", "투자에 사용할 금액 self.use_money_percent = 0.5 #예수금에서 실제 사용할 비율 self.output_deposit = 0", "잔고 비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결 정보 : \", self.not_concluded_account[sCode][\"종목명\"],a, b) pass except", "total_chegual_price}) # tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update() logging.debug(\"sPreNext : %s\" % sPrevNext) print(\"\\n계좌에 가지고", "'' if chegual_price == '': chegual_price = 0 else: chegual_price = int(chegual_price) chegual_quantity", "준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\")", "Jango(code) tmp = self.not_concluded_account[code] tmp.jango.update({'종목코드': code}) tmp.jango.update({'종목명': code_nm}) tmp.jango.update({'주문번호': order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량':", "first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = abs(int(first_buy_price)) if sCode not", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입가\") # 매입가 : 000000000054100", "screen_number_setting(self): screen_overwrite = [] #계좌평가잔고내역에 있는 종목들 for code in self.account_stock_dict.keys(): if code", "code not in screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들 for code in self.portfolio_stock_dict.keys(): if", "(cnt % 50) == 0: temp_screen += 1 self.screen_real_stock = str(temp_screen) if (cnt", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name = stock_name.strip() origin_order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 :", "market_code): ''' 종목코드 리스트 받기 #0:장내, 10:코스닥 :param market_code: 시장코드 입력 :return: '''", "# for line in lines: #줄바꿈된 내용들이 한줄 씩 읽어와진다. 
# if line", "self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600)", "출력: 2110 default : '' if chegual_price == '': chegual_price = 0 else:", "= \"1000\" #장 시작/종료 실시간 스크린번호 ######################################## ######### 초기 셋팅 함수들 바로 실행", "tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\": order_gubun}) tmp.jango.update({\"체결가\": chegual_price}) tmp.jango.update({\"체결량\": chegual_quantity}) tmp.jango.update({\"현재가\": current_price}) tmp.update()", "QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop() ######################################### ####### 계좌 관련된 변수 self.account_stock_dict = {} self.not_concluded_account", "screen_num = self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", screen_num, code, fids,", "sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 : 240124 h = abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString, int)\",", "= 0 for code in screen_overwrite: temp_screen = int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock) if", "= code.strip() code_name = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목명\") code_name", "#print(\"실시간 주식체결 정보 : \", self.not_concluded_account[sCode][\"종목명\"],a, b) pass except Exception as e: print(\"실시간", "i, \"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"체결량\") code", "* from PyQt5.QtCore import * from config.errorCode import * from PyQt5.QtTest import *", "+(-)2520 c = abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력 :", 
"기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as e: print(e)", "\"매매가능수량\") logging.debug(\"종목코드: %s - 종목명: %s - 보유수량: %s - 매입가:%s - 수익률:", "= code_nm.strip() order_no = int(order_no.strip()) order_status = order_status.strip() order_quantity = int(order_quantity.strip()) order_price =", "# def __init__(self): # self.app = QApplication(sys.argv) # self.kiwoom = Kiwoom() # ret", "self.detail_account_mystock() #계좌평가잔고내역 요청 시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에 미체결 종목들 가져오기 실행", "print(self.not_concluded_account[sCode].jango) elif int(sGubun) == 1: #잔고 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\",", ": %s - %s - %s\" % (total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString,", "self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price}) self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\") #", "- 수익률: %s - 현재가: %s\" % ( code, code_nm, stock_quantity, buy_price, learn_rate,", "current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name}) self.jango_dict[sCode].update({\"보유수량\": stock_quan}) self.jango_dict[sCode].update({\"주문가능수량\": like_quan}) self.jango_dict[sCode].update({\"매입단가\": buy_price}) self.jango_dict[sCode].update({\"총매입가\": total_buy_price})", "print(\"예수금 : %s\" % self.output_deposit) self.stop_screen_cancel(self.screen_my_info) self.detail_account_info_event_loop.exit() elif sRQName == \"계좌평가잔고내역요청\": total_buy_money =", 
"int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock) if (cnt % 50) == 0: temp_screen += 1", "stock_price = int(ls[2].split(\"\\n\")[0]) # stock_price = abs(stock_price) # self.portfolio_stock_dict.update({stock_code:{\"종목명\":stock_name, \"현재가\":stock_price}}) # f.close() files", "해당 종목이 있나 확인 pass else: self.account_stock_dict[code] = Jango(code) code_nm = code_nm.strip() stock_quantity", "1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count = self.account_stock_dict[sCode].jango[\"체결량\"] while count", "self.total_profit_loss_money = 0 #총평가손익금액 self.total_profit_loss_rate = 0.0 #총수익률(%) ######################################## ######## 종목 정보 가져오기", "sTrCode, sRQName, i, \"저가\").strip() # 출력 : 000070 data=[int(current_price),int(volume), int(start_price), int(high_price), int(low_price)] ret_data.append(data)", "= float(d) e = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 : +(-)2520 e", "%s - 매입가:%s - 수익률: %s - 현재가: %s\" % ( code, code_nm,", "QEventLoop() ######################################### ####### 계좌 관련된 변수 self.account_stock_dict = {} self.not_concluded_account = {} self.deposit", "입력 :return: ''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list = code_list.split(';')[:-1] return code_list def", "= order_status.strip() order_quantity = int(order_quantity.strip()) order_price = int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity =", "때 g = abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 :", "first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price)) first_buy_price = 
self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price =", "market_code: 시장코드 입력 :return: ''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list = code_list.split(';')[:-1] return", "trading_value = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래대금\") # 출력 :", "함수 self.event_slots() # 키움과 연결하기 위한 시그널 / 슬롯 모음 self.real_event_slot() # 실시간", "==\"매도정정\": type_dict = 6 if hoga_type ==\"지정가\": hoga_dict = \"00\" elif hoga_type ==\"시장가\":", "order_type ==\"매도취소\": type_dict = 4 elif order_type ==\"매수정정\": type_dict = 5 elif order_type", "% ( code, code_nm, stock_quantity, buy_price, learn_rate, current_price)) if code in self.account_stock_dict: #", "temp_screen = int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock) if (cnt % 50) == 0: temp_screen", "# 실시간 데이터 얻어오기 def realdata_slot(self, sCode, sRealType, sRealData): if sRealType == \"장시작시간\":", "4 elif order_type ==\"매수정정\": type_dict = 5 elif order_type ==\"매도정정\": type_dict = 6", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = abs(int(first_buy_price))", "#송수신 메세지 get def msg_slot(self, sScrNo, sRQName, sTrCode, msg): logging.debug(\"스크린: %s, 요청이름: %s,", "int(current_price.strip()) total_chegual_price = int(total_chegual_price.strip()) possible_quantity = int(possible_quantity.strip()) tmp = self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) #", "self.output_deposit = 0 #출력가능 금액 self.total_profit_loss_money = 0 #총평가손익금액 self.total_profit_loss_rate = 0.0 #총수익률(%)", "self.OnReceiveMsg.connect(self.msg_slot) def real_event_slot(self): self.OnReceiveRealData.connect(self.realdata_slot) # 실시간 이벤트 연결 self.OnReceiveChejanData.connect(self.chejan_slot) #종목 주문체결 관련한 
이벤트", "QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분) ret =", "as e: print(e) print(\"EXception 현재 가지고 있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결", "메세지 get def msg_slot(self, sScrNo, sRQName, sTrCode, msg): logging.debug(\"스크린: %s, 요청이름: %s, tr코드:", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수, 확인, 체결 order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) #", "tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price}) #", "f = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 : +(-)2515 f = abs(int(f))", "detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\",", "import * import logging from PyQt5.QtWidgets import * STOP_LOSS_RATE = 0.03 STOP_PROFIT_RATE =", "시그널 self.login_event_loop.exec_() # 이벤트루프 실행 def login_slot(self, err_code): logging.debug(errors(err_code)[1]) #로그인 처리가 완료됐으면 이벤트", "screen_overwrite = [] #계좌평가잔고내역에 있는 종목들 for code in self.account_stock_dict.keys(): if code not", "+= 1 self.screen_real_stock = str(temp_screen) if (cnt % 50) == 0: meme_screen +=", "( code, code_nm, stock_quantity, buy_price, learn_rate, current_price)) if code in self.account_stock_dict: # dictionary", "self.realType.REALTYPE['잔고']['보유수량']) stock_quan = int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan) buy_price =", "int)\", sCode, self.realType.REALTYPE[sRealType]['현재가']) # 출력 : +(-)2520 b = abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString,", "= int(deposit) use_money = float(self.deposit) * self.use_money_percent self.use_money = int(use_money) self.use_money = self.use_money", "code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") # 출럭", "종목명: %s - 보유수량: %s - 매입가:%s - 수익률: %s - 현재가: %s\"", "- 현재가: %s\" % ( code, code_nm, stock_quantity, buy_price, learn_rate, current_price)) if code", "self.jango[\"스크린번호\"]=\"\" self.jango[\"주문용스크린번호\"]=\"\" self.jango[\"손익률\"]=0. 
# self.jango[\"평균단가\"]=0 self.jango[\"보유금액\"]=0 def update(self): #손익률 if self.jango[\"체결가\"] != 0:", "in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] =", "# 출력 : +240124 매수일때, -2034 매도일 때 g = abs(int(g)) h =", "self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt += 1 # 실시간 데이터 얻어오기 def realdata_slot(self,", "chejan_slot(self, sGubun, nItemCnt, sFidList): if int(sGubun) == 0: #주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호'])", "i, \"시가\").strip() # 출력 : 000070 high_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString,", "self.use_money / 4 output_deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"출금가능금액\")", "출력 : 000070 date = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"일자\")", "QString, QString)\", screen_num, code, fids, \"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된 api", "\"0\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self, sScrNo,", "i, \"주문수량\") order_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문가격\") 
order_gubun", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString, QString,", "전달 성공\"%order_type) print(\"%s 전달 성공\"%order_type) else: logging.debug(\"%s 전달 실패\"%order_type) return order_success # 실시간", "QString, QString, QString)\",sRQName,trCode, \"0\", self.screen_meme_stock) # ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\") self.calculator_event_loop.exec_()", "4,8:장종료(30분), 9:장마감) value = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid) if value == '0': logging.debug(\"장", "sCode, self.realType.REALTYPE[sRealType]['저가']) # 출력 : +(-)2530 k = abs(int(k)) if sCode not in", "실패\"%order_type) return order_success # 실시간 체결 정보 def chejan_slot(self, sGubun, nItemCnt, sFidList): if", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"보유수량\") # 보유수량 : 000000000000010", "send_order(self,order_type, sCode, order_quantity, order_price, hoga_type, order_num=\"\"): if order_type == \"신규매수\": type_dict = 1", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"실시간미체결요청\", \"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_()", "= 6 if hoga_type ==\"지정가\": hoga_dict = \"00\" elif hoga_type ==\"시장가\": hoga_dict =", "int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS b = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['현재가']) #", "like_quan = int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price)) total_buy_price = self.dynamicCall(\"GetChejanData(int)\",", "동시호가로 넘어감\") elif value == \"4\": logging.debug(\"3시30분 장 종료\") for code in self.not_concluded_account.keys():", "self.dynamicCall(\"GetCommData(QString, QString, int, 
QString)\", sTrCode, sRQName, i, \"수익률(%)\") # 수익률 : -000000001.94 current_price", "[[‘’, ‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’. ‘’], [‘’, ‘현재가’, ’거래량’, ‘거래대금’,", "'151028' chegual_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['체결가']) # 출력: 2110 default : '' if chegual_price", "d = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율']) # 출력 : +(-)12.98 d = float(d)", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래대금\") # 출력 : 000070", "self.data = None ####### 요청 스크린 번호 self.screen_my_info = \"2000\" #계좌 관련한 스크린", "준다. self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString,", "origin_order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 : defaluse : \"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\",", "출럭: 0115061 마지막 주문번호 order_status = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수, 확인, 체결", "= abs(int(buy_price)) total_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는 종목의 총매입가 total_buy_price =", "sRQName, i, \"현재가\").strip() # 출력 : 000070 volume = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "= self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", screen_num, code, fids, \"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\")", "detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num)", "QString)\", sTrCode, sRQName, i, \"현재가\") # 현재가 : 000000003450 total_chegual_price = self.dynamicCall(\"GetCommData(QString, QString,", "if sCode in self.account_stock_dict.keys(): try: # 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k:", "int, int, QString, QString)\", [order_type, self.screen_meme_stock, self.account_num, type_dict, sCode, order_quantity, order_price, hoga_dict, order_num]", "#실제 투자에 사용할 금액 self.use_money_percent = 0.5 #예수금에서 실제 사용할 비율 self.output_deposit =", "like_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price =", "int(deposit) use_money = float(self.deposit) * self.use_money_percent self.use_money = int(use_money) self.use_money = self.use_money /", ": +(-)2530 j = abs(int(j)) k = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['저가']) # 출력", "sRQName == \"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총매입금액\")", "= self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) # print(sTrCode) # data = self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode,", "- 매입가:%s - 수익률: %s - 현재가: %s\" % ( code, code_nm, stock_quantity,", "super().__init__() self.realType = RealType() # self.slack = Slack() #슬랙 동작 #print(\"kiwoom() class start.", "\"현재가\":stock_price}}) # f.close() files = os.listdir(\"./models/\") codes=list() for f in files: codes.append(f.replace(\".pt\",\"\")) for", "\"조회구분\", \"1\") 
self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"계좌평가잔고내역요청\", \"opw00018\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self,", "준다. self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\")", "current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\") # 현재가 :", "in companys: screen_num = self.not_concluded_account[code]['스크린번호'] fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", screen_num,", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000 order_price = int(order_price) not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) #", "self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감) value = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid)", "order_no}) tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity})", "tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number}) tmp.jango.update({\"주문구분\":", "# tmp.jango.update({'매매가능수량' : possible_quantity}) tmp.update() logging.debug(\"sPreNext : %s\" % sPrevNext) print(\"\\n계좌에 가지고 있는", ": 000070 high_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"고가\").strip() #", "def 
detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.account_stock_dict = dict() self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\",", "meme_screen = int(self.screen_meme_stock) if (cnt % 50) == 0: temp_screen += 1 self.screen_real_stock", "sRQName, i, \"주문상태\") # 접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "print(e) print(\"EXception 현재 가지고 있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결 정보 :", "j = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력 : +(-)2530 j = abs(int(j))", "count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret =", "# 출력: -6010 first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000", "# 출력: 2110 default : '' if chegual_price == '': chegual_price = 0", "%s - 보유수량: %s - 매입가:%s - 수익률: %s - 현재가: %s\" %", "total_buy_price = int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\",", "0.0 #총수익률(%) ######################################## ######## 종목 정보 가져오기 self.portfolio_stock_dict = {} self.jango_dict = {}", "정보 def chejan_slot(self, sGubun, nItemCnt, sFidList): if int(sGubun) == 0: #주문체결 account_num =", "# print(\"sRQName\", sRQName) if sRQName == \"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "if (cnt % 50) == 0: meme_screen += 1 self.screen_meme_stock = str(meme_screen) if", "QString)\", sTrCode, sRQName, i, \"저가\").strip() # 출력 : 000070 
data=[int(current_price),int(volume), int(start_price), int(high_price), int(low_price)]", "sRQName) if sRQName == \"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "self.calculator_event_loop.exit() def multi_rq3(self, sCode, tick): QTest.qWait(3600) #3.6초마다 딜레이를 준다. trCode = \"opt10080\" sRQName", "sTrCode, sRQName, i, \"종목명\") # 출럭 : 한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString, QString, int,", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목명\") code_name = code_name.strip() current_price", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래량\").strip() # 출력 : 000070 trading_value", "QString, int, QString)\", sTrCode, sRQName, i, \"수익률(%)\") # 수익률 : -000000001.94 current_price =", "self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price})", "open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") # \"r\"을 인자로 던져주면 파일 내용을 읽어 오겠다는 뜻이다. 
#", "count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1", "0 else: chegual_quantity = int(chegual_quantity) current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000 current_price", "\"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS b = self.dynamicCall(\"GetCommRealData(QString,", ": 한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"보유수량\") #", "50) == 0: temp_screen += 1 self.screen_real_stock = str(temp_screen) if (cnt % 50)", "elif order_type ==\"매수정정\": type_dict = 5 elif order_type ==\"매도정정\": type_dict = 6 if", "= abs(int(b)) c = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력 : +(-)2520 c", "self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt += 1 # 실시간", "int, QString)\", sTrCode, sRQName, i, \"저가\").strip() # 출력 : 000070 data=[int(current_price),int(volume), int(start_price), int(high_price),", ":return: ''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list = code_list.split(';')[:-1] return code_list def read_code(self):", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력 : +(-)2530 i = abs(int(i)) j", "in lines: #줄바꿈된 내용들이 한줄 씩 읽어와진다. 
# if line != \"\": #", "self.get_not_concluded_account_event_loop = QEventLoop() ######################################### ####### 계좌 관련된 변수 self.account_stock_dict = {} self.not_concluded_account =", "# 출력 : 000070 low_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절", "lines = f.readlines() #파일에 있는 내용들이 모두 읽어와 진다. # for line in", "abs(int(first_buy_price)) if sCode not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\": stock_name})", "==\"매수정정\": type_dict = 5 elif order_type ==\"매도정정\": type_dict = 6 if hoga_type ==\"지정가\":", "self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) elif code not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\":", "self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"계좌평가잔고내역요청\",", "self.realType.REALTYPE[sRealType]['(최우선)매도호가']) # 출력 : +(-)2520 e = abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "출력: -매도, +매수 order_gubun = order_gubun.strip().lstrip('+').lstrip('-') chegual_time_str = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문/체결시간']) # 출력: '151028'", "self.account_num) self.dynamicCall(\"SetInputValue(QString, 
QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\",", "= abs(int(first_buy_price)) if sCode not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode}) self.jango_dict[sCode].update({\"종목명\":", "# self.slack = Slack() #슬랙 동작 #print(\"kiwoom() class start. \") print(\"Kiwoom() class start.\")", "self.jango_dict[sCode].update({\"매도매수구분\": meme_gubun}) self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\") # print(self.jango_dict) if stock_quan ==", "order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결 종목 : %s \"", "함수 #장시작 종료 세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def", "e: print(e) print(\"EXception 현재 가지고 있는 잔고 비교 정보\",self.account_stock_dict[sCode].jango) try: #print(\"실시간 주식체결 정보", "수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString, QString)\",", "not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt += 1", "sRQName, i, \"종목명\") # 출럭 : 한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "int, QString)\", sTrCode, sRQName, i, \"시가\").strip() # 출력 : 000070 high_price = self.dynamicCall(\"GetCommData(QString,", "######## 새로 들어온 주문이면 주문번호 할당 if sCode 
not in self.not_concluded_account.keys(): self.not_concluded_account[sCode]=Jango(sCode) tmp", "현재 가지고 있는 대상인지 파악 if sCode in self.account_stock_dict.keys(): try: # 스탑로스 구현", "\"opt10075\", sPrevNext, self.screen_my_info) self.get_not_concluded_account_event_loop.exec_() def trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext): # print(\"sRQName\",", "\"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된 api 모듈 불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot)", ": +240124 매수일때, -2034 매도일 때 g = abs(int(g)) h = self.dynamicCall(\"GetCommRealData(QString, int)\",", "float(self.deposit) * self.use_money_percent self.use_money = int(use_money) self.use_money = self.use_money / 4 output_deposit =", "self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price)) total_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는 종목의 총매입가", "출력 : defaluse : \"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) # 출럭: 0115061 마지막", "sRQName, i, \"거래량\").strip() # 출력 : 000070 trading_value = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "10:코스닥 :param market_code: 시장코드 입력 :return: ''' code_list = self.dynamicCall(\"GetCodeListByMarket(QString)\", market_code) code_list =", "종목코드 리스트 받기 #0:장내, 10:코스닥 :param market_code: 시장코드 입력 :return: ''' code_list =", "int, QString)\", sTrCode, sRQName, 0, \"총평가손익금액\") self.total_profit_loss_money = int(total_profit_loss_money) total_profit_loss_rate = self.dynamicCall(\"GetCommData(QString, QString,", "있는 종목의 총매입가 total_buy_price = int(total_buy_price) meme_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매도매수구분']) meme_gubun = self.realType.REALTYPE['매도수구분'][meme_gubun]", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래량\").strip() # 출력 : 000070", 
"self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"고가\").strip() # 출력 : 000070 low_price", "self.total_profit_loss_rate = 0.0 #총수익률(%) ######################################## ######## 종목 정보 가져오기 self.portfolio_stock_dict = {} self.jango_dict", "order_number}) tmp.jango.update({\"종목명\": stock_name}) tmp.jango.update({\"주문상태\": order_status}) tmp.jango.update({\"주문수량\": order_quan}) tmp.jango.update({\"주문가격\": order_price}) tmp.jango.update({\"미체결수량\": not_chegual_quan}) tmp.jango.update({\"원주문번호\": origin_order_number})", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 : defaluse : \"000000\" order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문번호']) #", "cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) # print(sTrCode) # data = self.dynamicCall(\"GetCommDataEx(QString, QString)\",", "stock_name = stock_name.strip() origin_order_number = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['원주문번호']) # 출력 : defaluse : \"000000\"", "total_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는 종목의 총매입가 total_buy_price = int(total_buy_price) meme_gubun", "= self.dynamicCall( \"SendOrder(QString, QString, QString, int, QString, int, int, QString, QString)\", [order_type, self.screen_meme_stock,", "sScrNo, sRQName, sTrCode, sRecordName, sPrevNext): # print(\"sRQName\", sRQName) if sRQName == \"예수금상세현황요청\": deposit", "print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except", "가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)) ret = 
self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0", "- 보유수량: %s - 매입가:%s - 수익률: %s - 현재가: %s\" % (", "[‘’, ‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’, ‘’]. […]] logging.debug(\"3분봉조회 %s\" %", "i, \"거래대금\") # 출력 : 000070 date = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "stock_quantity = int(stock_quantity.strip()) buy_price = int(buy_price.strip()) learn_rate = float(learn_rate.strip()) current_price = int(current_price.strip()) total_chegual_price", "lines: #줄바꿈된 내용들이 한줄 씩 읽어와진다. # if line != \"\": # ls", "= int(self.screen_real_stock) meme_screen = int(self.screen_meme_stock) if (cnt % 50) == 0: temp_screen +=", "sCode, order_quantity, order_price, hoga_type, order_num=\"\"): if order_type == \"신규매수\": type_dict = 1 elif", "던져주면 파일 내용을 읽어 오겠다는 뜻이다. # lines = f.readlines() #파일에 있는 내용들이", "QString)\", sTrCode, sRQName, i, \"보유수량\") # 보유수량 : 000000000000010 buy_price = self.dynamicCall(\"GetCommData(QString, QString,", "int, QString)\", sTrCode, sRQName, i, \"매매가능수량\") logging.debug(\"종목코드: %s - 종목명: %s - 보유수량:", "self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0", "딜레이를 준다. 
trCode = \"opt10080\" sRQName = \"3분봉조회\" 수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString, QString)\",", "이벤트루프 self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop() ######################################### ####### 계좌 관련된 변수 self.account_stock_dict", "수익률 : -000000001.94 current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\")", "code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목코드\") code = code.strip()", "order_success == 0: logging.debug(\"%s 전달 성공\"%order_type) print(\"%s 전달 성공\"%order_type) else: logging.debug(\"%s 전달 실패\"%order_type)", "QTimer.singleShot(5000, self.get_not_concluded_account) #5초 뒤에 미체결 종목들 가져오기 실행 ######################################### # QTest.qWait(10000) self.read_code() self.screen_number_setting()", "%s\" % sPrevNext) print(\"\\n계좌에 가지고 있는 종목은 %s \" % rows) # for", "’거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’, ‘’]. 
[…]] logging.debug(\"3분봉조회 %s\" % cnt) ret_data=list()", "i in range(cnt): data = [] code = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "try: # 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"] while", "\"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\" self.jango[\"주문수량\"]=0 self.jango[\"주문가격\"]=0 self.jango[\"주문구분\"]=\"\" self.jango[\"미체결수량\"]=\"\" self.jango[\"스크린번호\"]=\"\"", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"일자\") # 출력 : 000070 start_price", "==\"매수취소\": type_dict = 3 elif order_type ==\"매도취소\": type_dict = 4 elif order_type ==\"매수정정\":", "sTrCode, sRQName, i, \"현재가\").strip() # 출력 : 000070 volume = self.dynamicCall(\"GetCommData(QString, QString, int,", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price))", "\"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총매입금액\") self.total_buy_money =", "i, \"주문번호\") order_status = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문상태\") #", "Jango(code) code_nm = code_nm.strip() stock_quantity = int(stock_quantity.strip()) buy_price = int(buy_price.strip()) learn_rate = float(learn_rate.strip())", "from config.slack import * import logging from PyQt5.QtWidgets import * STOP_LOSS_RATE = 0.03", "# 출력 : 000070 volume = self.dynamicCall(\"GetCommData(QString, QString, int, 
QString)\", sTrCode, sRQName, i,", "code.strip() code_nm = code_nm.strip() order_no = int(order_no.strip()) order_status = order_status.strip() order_quantity = int(order_quantity.strip())", "order_status = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문상태\") # 접수,확인,체결 order_quantity", "\"종목명\") order_no = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문번호\") order_status =", "QString)\", sTrCode, sRQName, i, \"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "‘시가’, ‘고가’, ‘저가’. ‘’], [‘’, ‘현재가’, ’거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’, ‘’].", "screen_overwrite.append(code) # 스크린번호 할당 cnt = 0 for code in screen_overwrite: temp_screen =", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가']) first_buy_price = abs(int(first_buy_price)) if sCode not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\":", "# self.kiwoom = Kiwoom() # ret = self.kiwoom.multi_test() # # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO)", "def __init__(self): # self.app = QApplication(sys.argv) # self.kiwoom = Kiwoom() # ret =", "output_deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"출금가능금액\") self.output_deposit = int(output_deposit)", "sPrevNext): # print(\"sRQName\", sRQName) if sRQName == \"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString, QString, int,", "sTrCode, sRQName, i, \"종목명\") order_no = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "QString, int, QString)\", sTrCode, sRQName, i, \"보유수량\") # 보유수량 : 000000000000010 buy_price =", "not in screen_overwrite: screen_overwrite.append(code) # 스크린번호 할당 cnt = 0 for code in", "sRQName, i, \"수익률(%)\") # 수익률 : -000000001.94 current_price = self.dynamicCall(\"GetCommData(QString, QString, 
int, QString)\",", "self.get_not_concluded_account_event_loop.exit() ####################################### elif sRQName == \"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) #", "else: self.account_stock_dict[code] = Jango(code) code_nm = code_nm.strip() stock_quantity = int(stock_quantity.strip()) buy_price = int(buy_price.strip())", "# 출력 : +(-)2520 e = abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가'])", "self.use_money_percent self.use_money = int(use_money) self.use_money = self.use_money / 4 output_deposit = self.dynamicCall(\"GetCommData(QString, QString,", "return order_success # 실시간 체결 정보 def chejan_slot(self, sGubun, nItemCnt, sFidList): if int(sGubun)", "code_list def read_code(self): # if os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에 파일이 있는지 체크한다. #", "= self.account_stock_dict[code] tmp.jango.update({\"종목명\": code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\":", "count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏 기준가',self.account_stock_dict[sCode].jango['체결가']*(1+STOP_LOSS_RATE)) ret = self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price = abs(int(buy_price)) total_buy_price", "sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분) ret = self.dynamicCall(\"CommRqData(QString, QString,", "int, QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def 
detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를", "뒤에 미체결 종목들 가져오기 실행 ######################################### # QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간 수신", "self.screen_real_stock = str(temp_screen) if (cnt % 50) == 0: meme_screen += 1 self.screen_meme_stock", "first_buy_price = abs(int(first_buy_price)) if sCode not in self.jango_dict.keys(): self.jango_dict.update({sCode:{}}) self.jango_dict[sCode].update({\"현재가\": current_price}) self.jango_dict[sCode].update({\"종목코드\": sCode})", "\"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"체결구분\", \"1\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"매매구분\", \"0\") self.dynamicCall(\"CommRqData(QString, QString, int,", "\"출금가능금액\") self.output_deposit = int(output_deposit) logging.debug(\"예수금 : %s\" % self.output_deposit) print(\"예수금 : %s\" %", "= int(order_price) not_chegual_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15, default: 0 not_chegual_quan =", "변수모음 self.login_event_loop = QEventLoop() #로그인 요청용 이벤트루프 self.detail_account_info_event_loop = QEventLoop() # 예수금 요청용", "stock_quantity, buy_price, learn_rate, current_price)) if code in self.account_stock_dict: # dictionary 에 해당 종목이", ": A039423 // 알파벳 A는 장내주식, J는 ELW종목, Q는 ETN종목 code = code.strip()[1:]", "= float(self.deposit) * self.use_money_percent self.use_money = int(use_money) self.use_money = self.use_money / 4 output_deposit", "sTrCode, sRQName, i, \"일자\") # 출력 : 000070 start_price = self.dynamicCall(\"GetCommData(QString, QString, int,", "== \"주식체결\": a = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['체결시간']) # 출력 HHMMSS b =", "not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결 종목 : %s \" % self.not_concluded_account[code])", "code in self.not_concluded_account.keys(): code 
= self.not_concluded_account[code]['종목코드'] if code not in screen_overwrite: screen_overwrite.append(code) #포트폴리로에", "QString)\", sTrCode, sRQName) # [[‘’, ‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’. ‘’],", "'': chegual_quantity = 0 else: chegual_quantity = int(chegual_quantity) current_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['현재가']) #", "code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\" self.jango[\"원주문번호\"]=\"\" self.jango[\"주문상태\"]=\"\"", "\"0\", self.screen_meme_stock) # ret = self.dynamicCall(\"GetCommDataEx(QString, QString)\", trCode, \"주식분봉차트\") self.calculator_event_loop.exec_() return self.data def", "== \"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감) value =", "= self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"시장가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count elif self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count", "\"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int, QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def detail_account_mystock(self,", "self.account_stock_dict.keys(): try: # 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"]", "trCode = \"opt10080\" sRQName = \"3분봉조회\" 수정주가구분 = 1 self.dynamicCall(\"SetInputValue(QString, QString)\", \"종목코드\", sCode)", "성공\"%order_type) else: logging.debug(\"%s 전달 실패\"%order_type) return 
order_success # 실시간 체결 정보 def chejan_slot(self,", "order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update() logging.debug(\"미체결", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목명']) stock_name =", "QString, int, QString)\", sTrCode, sRQName, i, \"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "g}) # 현재 가지고 있는 대상인지 파악 if sCode in self.account_stock_dict.keys(): try: #", "abs(int(i)) j = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['시가']) # 출력 : +(-)2530 j =", "-6000 current_price = abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010 first_sell_price =", "print(\"Kiwoom() class start.\") ####### event loop를 실행하기 위한 변수모음 self.login_event_loop = QEventLoop() #로그인", "abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력: -6010 first_sell_price = abs(int(first_sell_price)) first_buy_price =", "#종목 주문체결 관련한 이벤트 def signal_login_commConnect(self): self.dynamicCall(\"CommConnect()\") # 로그인 요청 시그널 self.login_event_loop.exec_() #", "stock_quan = int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\",", "self.realType.REALTYPE[sRealType]['전일대비']) # 출력 : +(-)2520 c = abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, 
sRQName, 0, \"종목코드\") code = code.strip() code_name =", "== 1: #잔고 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['종목코드'])[1:] stock_name =", "담겨있는 종목들 for code in self.portfolio_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) #", "######### 초기 셋팅 함수들 바로 실행 self.get_ocx_instance() #OCX 방식을 파이썬에 사용할 수 있게", "abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000 first_buy_price = abs(int(first_buy_price)) ######## 새로", "sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif sRQName == \"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString,", "가져오기 self.detail_account_info() #예수금 요청 시그널 포함 self.detail_account_mystock() #계좌평가잔고내역 요청 시그널 포함 QTimer.singleShot(5000, self.get_not_concluded_account)", "= account_list.split(';')[1] self.account_num = account_num logging.debug(\"계좌번호 : %s\" % account_num) def detail_account_info(self, sPrevNext=\"0\"):", "\"종목명\") code_name = code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "self.screen_my_info = \"2000\" #계좌 관련한 스크린 번호 self.screen_calculation_stock = \"4000\" #계산용 스크린 번호", ": '' if chegual_price == '': chegual_price = 0 else: chegual_price = int(chegual_price)", "in screen_overwrite: screen_overwrite.append(code) #미체결에 있는 종목들 for code in self.not_concluded_account.keys(): code = self.not_concluded_account[code]['종목코드']", "first_buy_price}) # print(\"잔고\") # print(self.jango_dict) if stock_quan == 0: del self.jango_dict[sCode] #송수신 메세지", "self.realType.REALTYPE['주문체결']['현재가']) # 출력: -6000 current_price = abs(int(current_price)) first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매도호가']) # 출력:", "= 
self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"수익률(%)\") # 수익률 : -000000001.94", "code_nm.strip() order_no = int(order_no.strip()) order_status = order_status.strip() order_quantity = int(order_quantity.strip()) order_price = int(order_price.strip())", "and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑로스 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑로스 기준가',self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE))", "* self.use_money_percent self.use_money = int(use_money) self.use_money = self.use_money / 4 output_deposit = self.dynamicCall(\"GetCommData(QString,", "# 해당 경로에 파일이 있는지 체크한다. # f = open(\"files/condition_stock.txt\", \"r\", encoding=\"utf8\") #", "# 출력 : +(-)2520 c = abs(int(c)) d = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['등락율'])", "data = self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName) # [[‘’, ‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’,", "== 0: logging.debug(\"%s 전달 성공\"%order_type) print(\"%s 전달 성공\"%order_type) else: logging.debug(\"%s 전달 실패\"%order_type) return", "0: self.jango[\"손익률\"] = (self.jango[\"현재가\"]-self.jango[\"체결가\"])/self.jango[\"체결가\"] #보유금액 self.jango[\"보유금액\"]=self.jango[\"체결가\"]*self.jango[\"체결량\"] #내용 확인해 보자. 
기존 주식과 합산 계산", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문상태\") # 접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString,", "= int(self.screen_meme_stock) if (cnt % 50) == 0: temp_screen += 1 self.screen_real_stock =", "QString)\", sTrCode, sRQName, 0, \"종목명\") code_name = code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString, QString, int,", "sPrevNext) print(\"\\n계좌에 가지고 있는 종목은 %s \" % rows) # for item in", "account_num logging.debug(\"계좌번호 : %s\" % account_num) def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다.", "trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext): # print(\"sRQName\", sRQName) if sRQName == \"예수금상세현황요청\":", "logging.debug(\"%s 전달 성공\"%order_type) print(\"%s 전달 성공\"%order_type) else: logging.debug(\"%s 전달 실패\"%order_type) return order_success #", "-000000001.94 current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\") # 현재가", "int, QString)\", sTrCode, sRQName, i, \"현재가\").strip() # 출력 : 000070 volume = self.dynamicCall(\"GetCommData(QString,", "if code not in screen_overwrite: screen_overwrite.append(code) # 스크린번호 할당 cnt = 0 for", "종목들 for code in self.not_concluded_account.keys(): code = self.not_concluded_account[code]['종목코드'] if code not in screen_overwrite:", "슬롯 연결 self.signal_login_commConnect() #로그인 요청 시그널 포함 self.get_account_info() #계좌번호 가져오기 self.detail_account_info() #예수금 요청", "if sRQName == \"예수금상세현황요청\": deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0,", "files: codes.append(f.replace(\".pt\",\"\")) for code in codes: self.portfolio_stock_dict[code] = Jango(code) return codes def screen_number_setting(self):", "# 출럭 : 한국기업평가 stock_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, 
self.realType.REALTYPE[sRealType]['현재가']) # 출력 : +(-)2520 b = abs(int(b)) c =", "# ret = self.kiwoom.multi_test() # # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget): def __init__(self):", "QString, int, QString)\", \"예수금상세현황요청\", \"opw00001\", sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def detail_account_mystock(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다", "sRQName, i, \"저가\").strip() # 출력 : 000070 data=[int(current_price),int(volume), int(start_price), int(high_price), int(low_price)] ret_data.append(data) self.data", "시그널 / 슬롯 모음 self.real_event_slot() # 실시간 이벤트 시그널 / 슬롯 연결 self.signal_login_commConnect()", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\") # 현재가 : 000000003450", "\"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) # print(sTrCode) # data = self.dynamicCall(\"GetCommDataEx(QString,", "tr코드: %s --- %s\" %(sScrNo, sRQName, sTrCode, msg)) # ui = Ui_class() class", "self.use_money = self.use_money / 4 output_deposit = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "매입가:%s - 수익률: %s - 현재가: %s\" % ( code, code_nm, stock_quantity, buy_price,", "# [[‘’, ‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’. ‘’], [‘’, ‘현재가’, ’거래량’,", "0.03 STOP_PROFIT_RATE = 0.03 # class Ui_class(): # def __init__(self): # self.app =", "ret_data.append(data) self.data = ret_data self.calculator_event_loop.exit() def multi_rq3(self, sCode, tick): QTest.qWait(3600) #3.6초마다 딜레이를 준다.", "QString, int, QString)\", sTrCode, sRQName, i, \"체결량\") code = code.strip() code_nm = code_nm.strip()", "sTrCode, sRQName) # [[‘’, ‘현재가’, ‘거래량’, ‘거래대금’, ‘날짜’, ‘시가’, ‘고가’, ‘저가’. 
‘’], [‘’,", "\"03\" order_success = self.dynamicCall( \"SendOrder(QString, QString, QString, int, QString, int, int, QString, QString)\",", "tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000}) tmp.update()", "파이썬에 사용할 수 있게 변환해 주는 함수 self.event_slots() # 키움과 연결하기 위한 시그널", "str(self.screen_meme_stock)}) elif code not in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)})", "0, \"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청 싱글데이터 : %s - %s - %s\"", "self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문상태']) # 출력: 접수, 확인, 체결 order_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문수량']) # 출력", "= self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['총매입가']) # 계좌에 있는 종목의 총매입가 total_buy_price = int(total_buy_price) meme_gubun =", "QApplication(sys.argv) # self.kiwoom = Kiwoom() # ret = self.kiwoom.multi_test() # # self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\",", "sTrCode, sRQName, i, \"미체결수량\") ok_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "읽어 오겠다는 뜻이다. 
# lines = f.readlines() #파일에 있는 내용들이 모두 읽어와 진다.", "sRQName, i, \"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매매가능수량\")", "# 계좌번호 반환 account_num = account_list.split(';')[1] self.account_num = account_num logging.debug(\"계좌번호 : %s\" %", "def __init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\" self.jango[\"체결가\"]=0 self.jango[\"현재가\"]=0 self.jango[\"체결량\"]=0 #보유수량 self.jango[\"주문번호\"]=\"\"", "return codes def screen_number_setting(self): screen_overwrite = [] #계좌평가잔고내역에 있는 종목들 for code in", "= self.send_order(\"신규매도\",sCode=sCode,order_quantity=1,order_price=b,hoga_type=\"지정가\") count -= 1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as e: print(e) print(\"EXception 현재", "= self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"총수익률(%)\") self.total_profit_loss_rate = float(total_profit_loss_rate) logging.debug(\"계좌평가잔고내역요청", "= abs(int(k)) if sCode not in self.not_concluded_account: self.not_concluded_account[sCode]=Jango(sCode) tmp_not_c = self.not_concluded_account[sCode] tmp_not_c.jango.update({\"현재가\": b})", "#총수익률(%) ######################################## ######## 종목 정보 가져오기 self.portfolio_stock_dict = {} self.jango_dict = {} ########################", "int, QString)\", sTrCode, sRQName, 0, \"종목코드\") code = code.strip() code_name = self.dynamicCall(\"GetCommData(QString, QString,", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString, QString, int,", "sTrCode, sRQName) # print(sTrCode) # data = self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName) # [[‘’,", "==\"신규매도\": type_dict = 2 elif order_type ==\"매수취소\": type_dict = 3 elif order_type ==\"매도취소\":", "self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) 
#3.6초마다 딜레이를 준다. self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString,", "####### 계좌 관련된 변수 self.account_stock_dict = {} self.not_concluded_account = {} self.deposit = 0", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['누적거래량']) # 출력 : 240124 h = abs(int(h)) i", "\"종목코드\") code_nm = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") order_no =", "바로 실행 self.get_ocx_instance() #OCX 방식을 파이썬에 사용할 수 있게 변환해 주는 함수 self.event_slots()", "\"종목코드\", sCode) self.dynamicCall(\"SetInputValue(QString, QString)\", \"틱범위\", tick) self.dynamicCall(\"SetInputValue(QString, QString)\", \"수정주가구분\", 수정주가구분) ret = self.dynamicCall(\"CommRqData(QString,", "import pickle import sys from PyQt5.QAxContainer import * from PyQt5.QtCore import * from", "if (cnt % 50) == 0: temp_screen += 1 self.screen_real_stock = str(temp_screen) if", "# 출력 : 000070 high_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "hoga_dict, order_num] ) if order_success == 0: logging.debug(\"%s 전달 성공\"%order_type) print(\"%s 전달 성공\"%order_type)", "미체결 종목들 가져오기 실행 ######################################### # QTest.qWait(10000) self.read_code() self.screen_number_setting() QTest.qWait(5000) #실시간 수신 관련", "시작 전\") elif value == '3': logging.debug(\"장 시작\") elif value == \"2\": logging.debug(\"장", "출력 : 000070 low_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"저가\").strip()", "== \"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) # print(sTrCode) # data =", "QString)\", sTrCode, sRQName, i, \"매입금액\") possible_quantity = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName,", "== 0: meme_screen += 1 self.screen_meme_stock = str(meme_screen) if code in self.portfolio_stock_dict.keys(): 
self.portfolio_stock_dict[code].jango.update({\"스크린번호\":", "000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"수익률(%)\") # 수익률", "in self.portfolio_stock_dict.keys(): self.portfolio_stock_dict[code] = Jango(code) self.portfolio_stock_dict[code].jango.update({\"스크린번호\": str(self.screen_real_stock)}) self.portfolio_stock_dict[code].jango.update({\"주문용스크린번호\": str(self.screen_meme_stock)}) cnt += 1 #", "rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i in range(rows): code = self.dynamicCall(\"GetCommData(QString,", "QString)\", sTrCode, sRQName, i, \"거래량\").strip() # 출력 : 000070 trading_value = self.dynamicCall(\"GetCommData(QString, QString,", "= 2 elif order_type ==\"매수취소\": type_dict = 3 elif order_type ==\"매도취소\": type_dict =", "sTrCode, sRQName, 0, \"예수금\") self.deposit = int(deposit) use_money = float(self.deposit) * self.use_money_percent self.use_money", "class Kiwoom(QAxWidget): def __init__(self): super().__init__() self.realType = RealType() # self.slack = Slack() #슬랙", "sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\")", "저장된 api 모듈 불러오기 def event_slots(self): self.OnEventConnect.connect(self.login_slot) # 로그인 관련 이벤트 self.OnReceiveTrData.connect(self.trdata_slot) #", "= code_name.strip() current_price = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"현재가\").strip() #", "if os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에 파일이 있는지 체크한다. 
# f = open(\"files/condition_stock.txt\", \"r\",", "code_nm}) # tmp.jango.update({\"보유수량\": stock_quantity}) tmp.jango.update({\"체결량\": stock_quantity}) # tmp.jango.update({\"매입가\": buy_price}) tmp.jango.update({\"체결가\": buy_price}) # tmp.jango.update({\"수익률(%)\":", "self.dynamicCall(\"DisconnectRealData(QString)\", sScrNo) # 스크린번호 연결 끊기 def get_code_list_by_market(self, market_code): ''' 종목코드 리스트 받기", "self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"종목명\") # 출럭 : 한국기업평가 stock_quantity", ": 3 order_quan = int(order_quan) order_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문가격']) # 출력: 21000 order_price", "데이터 얻어오기 def realdata_slot(self, sCode, sRealType, sRealData): if sRealType == \"장시작시간\": fid =", "QEventLoop() # 예수금 요청용 이벤트루프 self.calculator_event_loop = QEventLoop() self.get_not_concluded_account_event_loop = QEventLoop() ######################################### #######", "실시간 이벤트 시그널 / 슬롯 연결 self.signal_login_commConnect() #로그인 요청 시그널 포함 self.get_account_info() #계좌번호", "self.deposit = int(deposit) use_money = float(self.deposit) * self.use_money_percent self.use_money = int(use_money) self.use_money =", "#3.6초마다 딜레이를 준다. 
self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\", self.account_num) self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\",", "int(stock_quan) like_quan = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['주문가능수량']) like_quan = int(like_quan) buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['매입단가']) buy_price", "for code in self.account_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) #미체결에 있는 종목들", "type_dict = 1 elif order_type ==\"신규매도\": type_dict = 2 elif order_type ==\"매수취소\": type_dict", "= \"6000\" #종목별 할당할 주문용스크린 번호 self.screen_start_stop_real = \"1000\" #장 시작/종료 실시간 스크린번호", "# 출력 : 000070 trading_value = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i,", "current_price)) if code in self.account_stock_dict: # dictionary 에 해당 종목이 있나 확인 pass", "value == \"4\": logging.debug(\"3시30분 장 종료\") for code in self.not_concluded_account.keys(): self.dynamicCall(\"SetRealRemove(QString, QString)\", self.not_concluded_account[code]['스크린번호'],", "# print(sTrCode) # data = self.dynamicCall(\"GetCommDataEx(QString, QString)\", sTrCode, sRQName) # [[‘’, ‘현재가’, ‘거래량’,", "if sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\") else: self.detail_account_info_event_loop.exit() elif sRQName == \"실시간미체결요청\": rows =", "####### event loop를 실행하기 위한 변수모음 self.login_event_loop = QEventLoop() #로그인 요청용 이벤트루프 self.detail_account_info_event_loop", "######## 종목 정보 가져오기 self.portfolio_stock_dict = {} self.jango_dict = {} ######################## ########################################## self.data", "code_list = code_list.split(';')[:-1] return code_list def read_code(self): # if os.path.exists(\"files/condition_stock.txt\"): # 해당 경로에", "self.portfolio_stock_dict[code] = Jango(code) return codes def screen_number_setting(self): screen_overwrite = [] #계좌평가잔고내역에 있는 종목들", "번호 
self.screen_calculation_stock = \"4000\" #계산용 스크린 번호 self.screen_real_stock = \"5000\" #종목별 할당할 스크린", "screen_num, code, fids, \"1\") def get_ocx_instance(self): self.setControl(\"KHOPENAPI.KHOpenAPICtrl.1\") # 레지스트리에 저장된 api 모듈 불러오기", "%s\" % account_num) def detail_account_info(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\",", "sScrNo, sRQName, sTrCode, msg): logging.debug(\"스크린: %s, 요청이름: %s, tr코드: %s --- %s\" %(sScrNo,", "\"비밀번호\", \"0000\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"비밀번호입력매체구분\", \"00\") self.dynamicCall(\"SetInputValue(QString, QString)\", \"조회구분\", \"1\") self.dynamicCall(\"CommRqData(QString, QString, int,", "종목들 for code in self.account_stock_dict.keys(): if code not in screen_overwrite: screen_overwrite.append(code) #미체결에 있는", "order_quantity, order_price, hoga_type, order_num=\"\"): if order_type == \"신규매수\": type_dict = 1 elif order_type", "int, QString)\", sTrCode, sRQName, i, \"주문상태\") # 접수,확인,체결 order_quantity = self.dynamicCall(\"GetCommData(QString, QString, int,", "연결하기 위한 시그널 / 슬롯 모음 self.real_event_slot() # 실시간 이벤트 시그널 / 슬롯", "code, code_nm, stock_quantity, buy_price, learn_rate, current_price)) if code in self.account_stock_dict: # dictionary 에", "order_success = self.dynamicCall( \"SendOrder(QString, QString, QString, int, QString, int, int, QString, QString)\", [order_type,", "출력 : 240124 h = abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) #", "int(sGubun) == 0: #주문체결 account_num = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['계좌번호']) sCode = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['종목코드'])[1:] stock_name", "%s\" % (total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i", "self.dynamicCall(\"GetChejanData(int)\", 
self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000 first_buy_price = abs(int(first_buy_price)) ######## 새로 들어온 주문이면 주문번호", "스크린 번호 self.screen_real_stock = \"5000\" #종목별 할당할 스크린 번호 self.screen_meme_stock = \"6000\" #종목별", "ui = Ui_class() class Jango(): def __init__(self, code): self.jango=dict() self.jango[\"종목코드\"]=code self.jango[\"종목명\"] = \"\"", "order_type ==\"신규매도\": type_dict = 2 elif order_type ==\"매수취소\": type_dict = 3 elif order_type", "있나 확인 pass else: self.account_stock_dict[code] = Jango(code) code_nm = code_nm.strip() stock_quantity = int(stock_quantity.strip())", "self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid) if value == '0': logging.debug(\"장 시작 전\") elif value", "elif sRQName == \"계좌평가잔고내역요청\": total_buy_money = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0,", "출력 : 000070 volume = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래량\").strip()", "동작 #print(\"kiwoom() class start. 
\") print(\"Kiwoom() class start.\") ####### event loop를 실행하기 위한", "self.jango_dict[sCode].update({\"(최우선)매도호가\": first_sell_price}) self.jango_dict[sCode].update({\"(최우선)매수호가\": first_buy_price}) # print(\"잔고\") # print(self.jango_dict) if stock_quan == 0: del", "self.realType.REALTYPE['주문체결']['미체결수량']) # 출력: 15, default: 0 not_chegual_quan = int(not_chegual_quan) order_gubun = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['주문구분'])", "sRQName, i, \"매입가\") # 매입가 : 000000000054100 learn_rate = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\",", "= self.not_concluded_account[code]['종목코드'] if code not in screen_overwrite: screen_overwrite.append(code) #포트폴리로에 담겨있는 종목들 for code", "%s - %s\" % (total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName)", "tmp.jango.update({'주문상태': order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호':", "세팅 self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", self.screen_start_stop_real, '', self.realType.REALTYPE['장시작시간']['장운영구분'], \"0\") def setRealReg(self, companys): for", "3:장시작, 4,8:장종료(30분), 9:장마감) value = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, fid) if value == '0':", "= int(order_quantity.strip()) order_price = int(order_price.strip()) order_gubun = order_gubun.strip().lstrip('+').lstrip('-') not_quantity = int(not_quantity.strip()) ok_quantity =", "# for item in self.account_stock_dict.keys(): # print(self.account_stock_dict[item].jango) if sPrevNext == \"2\": self.detail_account_mystock(sPrevNext=\"2\") else:", "240124 h = abs(int(h)) i = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['고가']) # 출력 :", "logging.debug(\"계좌평가잔고내역요청 싱글데이터 
: %s - %s - %s\" % (total_buy_money, total_profit_loss_money, total_profit_loss_rate)) rows", "QString, int, QString)\", sTrCode, sRQName, i, \"매매가능수량\") logging.debug(\"종목코드: %s - 종목명: %s -", "sRQName == \"실시간미체결요청\": rows = self.dynamicCall(\"GetRepeatCnt(QString, QString)\", sTrCode, sRQName) for i in range(rows):", "QString)\", sTrCode, sRQName, i, \"거래대금\") # 출력 : 000070 date = self.dynamicCall(\"GetCommData(QString, QString,", "%s \" % self.not_concluded_account[code].jango) self.get_not_concluded_account_event_loop.exit() ####################################### elif sRQName == \"3분봉조회\": cnt = self.dynamicCall(\"GetRepeatCnt(QString,", "int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 : +(-)2515 f = abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString,", "QString, int, QString)\", sTrCode, sRQName, i, \"거래대금\") # 출력 : 000070 date =", "연결 self.signal_login_commConnect() #로그인 요청 시그널 포함 self.get_account_info() #계좌번호 가져오기 self.detail_account_info() #예수금 요청 시그널", "sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 : +(-)2515 f = abs(int(f)) g = self.dynamicCall(\"GetCommRealData(QString, int)\",", "#종목별 할당할 스크린 번호 self.screen_meme_stock = \"6000\" #종목별 할당할 주문용스크린 번호 self.screen_start_stop_real =", "self.account_stock_dict[sCode].jango['체결가']*(1+STOP_PROFIT_RATE)<b: # 익절 count = self.account_stock_dict[sCode].jango[\"체결량\"] while count >0: print(\"스탑프로핏 가동\",self.account_stock_dict[sCode].jango['체결가'], k) print('스탑프로핏", "얻어오기 def realdata_slot(self, sCode, sRealType, sRealData): if sRealType == \"장시작시간\": fid = self.realType.REALTYPE[sRealType]['장운영구분']", "= self.realType.REALTYPE[sRealType]['장운영구분'] # (0:장시작전, 2:장종료전(20분), 3:장시작, 4,8:장종료(30분), 9:장마감) value = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode,", "출력: -6010 first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['주문체결']['(최우선)매수호가']) # 출력: -6000 first_buy_price", "%s - 현재가: %s\" % ( 
code, code_nm, stock_quantity, buy_price, learn_rate, current_price)) if", "1 self.account_stock_dict[sCode].jango[\"체결량\"]=count except Exception as e: print(e) print(\"EXception 현재 가지고 있는 잔고 비교", "sRQName, i, \"주문가격\") order_gubun = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"주문구분\")", "= self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['전일대비']) # 출력 : +(-)2520 c = abs(int(c)) d", "e = abs(int(e)) f = self.dynamicCall(\"GetCommRealData(QString, int)\", sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가']) # 출력 : +(-)2515", "order_status}) tmp.jango.update({'주문수량': order_quantity}) tmp.jango.update({'주문가격': order_price}) tmp.jango.update({'주문구분': order_gubun}) tmp.jango.update({'미체결수량': not_quantity}) tmp.jango.update({'체결량': ok_quantity}) tmp.jango.update({'스크린번호': 1000})", "# self.app.exec_() logging.basicConfig(filename=\"kiwoom.log\", level=logging.INFO) class Kiwoom(QAxWidget): def __init__(self): super().__init__() self.realType = RealType() #", "in self.account_stock_dict.keys(): try: # 스탑로스 구현 print(self.account_stock_dict[sCode].jango[\"종목명\"],(self.account_stock_dict[sCode].jango['체결가']-k)/self.account_stock_dict[sCode].jango['체결가']) if self.account_stock_dict[sCode].jango[\"체결량\"]>0 and self.account_stock_dict[sCode].jango['체결가']*(1-STOP_LOSS_RATE)>k: count =", "volume = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, i, \"거래량\").strip() # 출력 :", "sPrevNext, self.screen_my_info) self.detail_account_info_event_loop.exec_() def get_not_concluded_account(self, sPrevNext=\"0\"): QTest.qWait(3600) #3.6초마다 딜레이를 준다. self.dynamicCall(\"SetInputValue(QString, QString)\", \"계좌번호\",", "==\"시장가\": hoga_dict = \"03\" order_success = self.dynamicCall( \"SendOrder(QString, QString, QString, int, QString, int,", "종료한다. self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를 준다. 
account_list = self.dynamicCall(\"GetLoginInfo(QString)\", \"ACCNO\") #", "code in codes: self.portfolio_stock_dict[code] = Jango(code) return codes def screen_number_setting(self): screen_overwrite = []", "0, \"출금가능금액\") self.output_deposit = int(output_deposit) logging.debug(\"예수금 : %s\" % self.output_deposit) print(\"예수금 : %s\"", "int, QString)\", sTrCode, sRQName, i, \"수익률(%)\") # 수익률 : -000000001.94 current_price = self.dynamicCall(\"GetCommData(QString,", "i, \"거래량\").strip() # 출력 : 000070 trading_value = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode,", "알파벳 A는 장내주식, J는 ELW종목, Q는 ETN종목 code = code.strip()[1:] code_nm = self.dynamicCall(\"GetCommData(QString,", "fids = self.realType.REALTYPE['주식체결']['체결시간'] self.dynamicCall(\"SetRealReg(QString, QString, QString, QString)\", screen_num, code, fids, \"1\") def get_ocx_instance(self):", "code_name = self.dynamicCall(\"GetCommData(QString, QString, int, QString)\", sTrCode, sRQName, 0, \"종목명\") code_name = code_name.strip()", "#로그인 처리가 완료됐으면 이벤트 루프를 종료한다. self.login_event_loop.exit() def get_account_info(self): QTest.qWait(3600) #3.6초마다 딜레이를 준다.", "= self.realType.REALTYPE['매도수구분'][meme_gubun] first_sell_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매도호가']) first_sell_price = abs(int(first_sell_price)) first_buy_price = self.dynamicCall(\"GetChejanData(int)\", self.realType.REALTYPE['잔고']['(최우선)매수호가'])" ]
[]
[ "dataset, is_training: bool, **metrics): model.train(is_training) dictionary = collections.defaultdict(int) counter = 0 with torch.set_grad_enabled(is_training):", "= model(X) loss = criterion(y_pred, y) for name, metric in metrics.items(): dictionary[name] +=", "y) for name, metric in metrics.items(): dictionary[name] += metric(y_pred, y) if is_training: loss.backward()", "if is_training: loss.backward() optimizer.step() optimizer.zero_grad() return {name: value / counter for name, value", "is_training: bool, **metrics): model.train(is_training) dictionary = collections.defaultdict(int) counter = 0 with torch.set_grad_enabled(is_training): for", "+= 1 y_pred = model(X) loss = criterion(y_pred, y) for name, metric in", "Train # Validate # On given arguments, data def run(model, criterion, optimizer, dataset,", "torch # Train # Validate # On given arguments, data def run(model, criterion,", "metric in metrics.items(): dictionary[name] += metric(y_pred, y) if is_training: loss.backward() optimizer.step() optimizer.zero_grad() return", "bool, **metrics): model.train(is_training) dictionary = collections.defaultdict(int) counter = 0 with torch.set_grad_enabled(is_training): for X,", "= criterion(y_pred, y) for name, metric in metrics.items(): dictionary[name] += metric(y_pred, y) if", "run(model, criterion, optimizer, dataset, is_training: bool, **metrics): model.train(is_training) dictionary = collections.defaultdict(int) counter =", "X, y in dataset: counter += 1 y_pred = model(X) loss = criterion(y_pred,", "collections import torch # Train # Validate # On given arguments, data def", "**metrics): model.train(is_training) dictionary = collections.defaultdict(int) counter = 0 with torch.set_grad_enabled(is_training): for X, y", "= collections.defaultdict(int) counter = 0 with torch.set_grad_enabled(is_training): for X, y in dataset: counter", "counter += 1 y_pred = model(X) loss = criterion(y_pred, y) for name, metric", "# Train # Validate # On given arguments, data 
def run(model, criterion, optimizer,", "= 0 with torch.set_grad_enabled(is_training): for X, y in dataset: counter += 1 y_pred", "y_pred = model(X) loss = criterion(y_pred, y) for name, metric in metrics.items(): dictionary[name]", "torch.set_grad_enabled(is_training): for X, y in dataset: counter += 1 y_pred = model(X) loss", "criterion, optimizer, dataset, is_training: bool, **metrics): model.train(is_training) dictionary = collections.defaultdict(int) counter = 0", "# On given arguments, data def run(model, criterion, optimizer, dataset, is_training: bool, **metrics):", "def run(model, criterion, optimizer, dataset, is_training: bool, **metrics): model.train(is_training) dictionary = collections.defaultdict(int) counter", "dataset: counter += 1 y_pred = model(X) loss = criterion(y_pred, y) for name,", "import torch # Train # Validate # On given arguments, data def run(model,", "y in dataset: counter += 1 y_pred = model(X) loss = criterion(y_pred, y)", "in dataset: counter += 1 y_pred = model(X) loss = criterion(y_pred, y) for", "0 with torch.set_grad_enabled(is_training): for X, y in dataset: counter += 1 y_pred =", "collections.defaultdict(int) counter = 0 with torch.set_grad_enabled(is_training): for X, y in dataset: counter +=", "criterion(y_pred, y) for name, metric in metrics.items(): dictionary[name] += metric(y_pred, y) if is_training:", "dictionary[name] += metric(y_pred, y) if is_training: loss.backward() optimizer.step() optimizer.zero_grad() return {name: value /", "metric(y_pred, y) if is_training: loss.backward() optimizer.step() optimizer.zero_grad() return {name: value / counter for", "On given arguments, data def run(model, criterion, optimizer, dataset, is_training: bool, **metrics): model.train(is_training)", "arguments, data def run(model, criterion, optimizer, dataset, is_training: bool, **metrics): model.train(is_training) dictionary =", "+= metric(y_pred, y) if is_training: loss.backward() optimizer.step() optimizer.zero_grad() return 
{name: value / counter", "loss.backward() optimizer.step() optimizer.zero_grad() return {name: value / counter for name, value in dictionary.items()}", "data def run(model, criterion, optimizer, dataset, is_training: bool, **metrics): model.train(is_training) dictionary = collections.defaultdict(int)", "for X, y in dataset: counter += 1 y_pred = model(X) loss =", "Validate # On given arguments, data def run(model, criterion, optimizer, dataset, is_training: bool,", "given arguments, data def run(model, criterion, optimizer, dataset, is_training: bool, **metrics): model.train(is_training) dictionary", "model.train(is_training) dictionary = collections.defaultdict(int) counter = 0 with torch.set_grad_enabled(is_training): for X, y in", "y) if is_training: loss.backward() optimizer.step() optimizer.zero_grad() return {name: value / counter for name,", "counter = 0 with torch.set_grad_enabled(is_training): for X, y in dataset: counter += 1", "loss = criterion(y_pred, y) for name, metric in metrics.items(): dictionary[name] += metric(y_pred, y)", "optimizer, dataset, is_training: bool, **metrics): model.train(is_training) dictionary = collections.defaultdict(int) counter = 0 with", "is_training: loss.backward() optimizer.step() optimizer.zero_grad() return {name: value / counter for name, value in", "import collections import torch # Train # Validate # On given arguments, data", "name, metric in metrics.items(): dictionary[name] += metric(y_pred, y) if is_training: loss.backward() optimizer.step() optimizer.zero_grad()", "metrics.items(): dictionary[name] += metric(y_pred, y) if is_training: loss.backward() optimizer.step() optimizer.zero_grad() return {name: value", "for name, metric in metrics.items(): dictionary[name] += metric(y_pred, y) if is_training: loss.backward() optimizer.step()", "<filename>src/runner.py import collections import torch # Train # Validate # On given arguments,", "in metrics.items(): dictionary[name] += metric(y_pred, y) if is_training: 
loss.backward() optimizer.step() optimizer.zero_grad() return {name:", "model(X) loss = criterion(y_pred, y) for name, metric in metrics.items(): dictionary[name] += metric(y_pred,", "with torch.set_grad_enabled(is_training): for X, y in dataset: counter += 1 y_pred = model(X)", "dictionary = collections.defaultdict(int) counter = 0 with torch.set_grad_enabled(is_training): for X, y in dataset:", "1 y_pred = model(X) loss = criterion(y_pred, y) for name, metric in metrics.items():", "# Validate # On given arguments, data def run(model, criterion, optimizer, dataset, is_training:" ]
[ "def to_device(self, batch: Sequence, device: Union[str, torch.device]) -> Tuple[Tensor, Tensor]: # get the", "self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False) def on_validation_batch_end( self,", "fo PL compatibility reasons accel = ( trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector", "= self.shared_step(pl_module, batch) # update finetune weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, on_step=True,", "to set training mode. When exit, recover the original training mode. Args: module:", "= None, num_classes: Optional[int] = None, dataset: Optional[str] = None, ): \"\"\" Args:", ") -> None: train_acc, mlp_loss = self.shared_step(pl_module, batch) # update finetune weights mlp_loss.backward()", "# update finetune weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss,", "representations = pl_module(x).flatten(start_dim=1) # forward pass mlp_logits = self.online_evaluator(representations) # type: ignore[operator] mlp_loss", "# last input is for online eval x = inputs[-1] x = x.to(device)", "Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int, ) ->", "z_dim self.hidden_dim = hidden_dim self.drop_p = drop_p self.optimizer: Optional[Optimizer] = None self.online_evaluator: Optional[SSLEvaluator]", "pl_module is still on cpu self.online_evaluator = SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device)", "Tensor, nn from torch.nn import functional as F from torch.optim import Optimizer from", "eval x = inputs[-1] 
x = x.to(device) y = y.to(device) return x, y", "drop_p: float = 0.2, hidden_dim: Optional[int] = None, num_classes: Optional[int] = None, dataset:", "have 2 attributes dm = DataModule() dm.num_classes = ... # the num of", "pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True, sync_dist=True) def on_save_checkpoint(self, trainer: Trainer, pl_module: LightningModule, checkpoint: Dict[str,", "distributed accelerator. The online evaluator will not sync.\" ) self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4)", "online_eval = SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\" def __init__( self, z_dim: int, drop_p: float", "mlp_logits = self.online_evaluator(representations) # type: ignore[operator] mlp_loss = F.cross_entropy(mlp_logits, y) acc = accuracy(mlp_logits.softmax(-1),", "from torch import Tensor, nn from torch.nn import functional as F from torch.optim", "model.z_dim = ... # the representation dim online_eval = SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\"", "mode. 
Args: module: module to set training mode mode: whether to set training", "dataset: Optional[str] = None, ): \"\"\" Args: z_dim: Representation dimension drop_p: Dropout probability", "z_dim: int, drop_p: float = 0.2, hidden_dim: Optional[int] = None, num_classes: Optional[int] =", "self.to_device(batch, pl_module.device) representations = pl_module(x).flatten(start_dim=1) # forward pass mlp_logits = self.online_evaluator(representations) # type:", "fine-tune MLP \"\"\" super().__init__() self.z_dim = z_dim self.hidden_dim = hidden_dim self.drop_p = drop_p", "self.z_dim = z_dim self.hidden_dim = hidden_dim self.drop_p = drop_p self.optimizer: Optional[Optimizer] = None", "is for online eval x = inputs[-1] x = x.to(device) y = y.to(device)", "training mode mode: whether to set training mode (True) or evaluation mode (False).", "last input is for online eval x = inputs[-1] x = x.to(device) y", "on cpu self.online_evaluator = SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) # switch fo", "): with torch.no_grad(): with set_training(pl_module, False): x, y = self.to_device(batch, pl_module.device) representations =", "from pl_bolts.models.self_supervised.evaluator import SSLEvaluator class SSLOnlineEvaluator(Callback): # pragma: no cover \"\"\"Attaches a MLP", "torch from pytorch_lightning import Callback, LightningModule, Trainer from pytorch_lightning.utilities import rank_zero_warn from torch", "y) return acc, mlp_loss def on_train_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence,", "MLP \"\"\" super().__init__() self.z_dim = z_dim self.hidden_dim = hidden_dim self.drop_p = drop_p self.optimizer:", "Union[str, torch.device]) -> Tuple[Tensor, Tensor]: # get the labeled batch if self.dataset ==", "your model must have 1 attribute model = Model() model.z_dim = ... #", "mode: whether to set training mode (True) or evaluation mode (False). 
\"\"\" original_mode", "Optional[int] = num_classes self.dataset: Optional[str] = dataset self._recovered_callback_state: Optional[Dict[str, Any]] = None def", "SSLEvaluator class SSLOnlineEvaluator(Callback): # pragma: no cover \"\"\"Attaches a MLP for fine-tuning using", "def shared_step( self, pl_module: LightningModule, batch: Sequence, ): with torch.no_grad(): with set_training(pl_module, False):", "recover the original training mode. Args: module: module to set training mode mode:", "= x.to(device) y = y.to(device) return x, y def shared_step( self, pl_module: LightningModule,", "pl_module(x).flatten(start_dim=1) # forward pass mlp_logits = self.online_evaluator(representations) # type: ignore[operator] mlp_loss = F.cross_entropy(mlp_logits,", "set training mode mode: whether to set training mode (True) or evaluation mode", "None, ): \"\"\" Args: z_dim: Representation dimension drop_p: Dropout probability hidden_dim: Hidden dimension", "torch.nn.parallel import DataParallel as DP self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn( \"Does not", "num_classes self.dataset: Optional[str] = dataset self._recovered_callback_state: Optional[Dict[str, Any]] = None def setup(self, trainer:", "stage: Optional[str] = None) -> None: if self.num_classes is None: self.num_classes = trainer.datamodule.num_classes", "None: val_acc, mlp_loss = self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss,", "is still on cpu self.online_evaluator = SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) #", "on_step=False, on_epoch=True, sync_dist=True) def on_save_checkpoint(self, trainer: Trainer, pl_module: LightningModule, checkpoint: Dict[str, Any]) ->", "type: ignore[operator] mlp_loss = F.cross_entropy(mlp_logits, y) acc = 
accuracy(mlp_logits.softmax(-1), y) return acc, mlp_loss", "# your datamodule must have 2 attributes dm = DataModule() dm.num_classes = ...", "Example:: # your datamodule must have 2 attributes dm = DataModule() dm.num_classes =", "lr=1e-4) if self._recovered_callback_state is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch: Sequence, device:", "def on_train_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx: int,", "import functional as F from torch.optim import Optimizer from torchmetrics.functional import accuracy from", "= SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) # switch fo PL compatibility reasons", "batch) # update finetune weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\",", "1 attribute model = Model() model.z_dim = ... # the representation dim online_eval", "from pytorch_lightning import Callback, LightningModule, Trainer from pytorch_lightning.utilities import rank_zero_warn from torch import", "int, ) -> None: train_acc, mlp_loss = self.shared_step(pl_module, batch) # update finetune weights", "accelerator. The online evaluator will not sync.\" ) self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if", "self.drop_p = drop_p self.optimizer: Optional[Optimizer] = None self.online_evaluator: Optional[SSLEvaluator] = None self.num_classes: Optional[int]", "bool): \"\"\"Context manager to set training mode. 
When exit, recover the original training", "return acc, mlp_loss def on_train_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch:", "no cover \"\"\"Attaches a MLP for fine-tuning using the standard self-supervised protocol. Example::", "= num_classes self.dataset: Optional[str] = dataset self._recovered_callback_state: Optional[Dict[str, Any]] = None def setup(self,", "= None self.dataset: Optional[str] = None self.num_classes: Optional[int] = num_classes self.dataset: Optional[str] =", "elif accel.use_dp: from torch.nn.parallel import DataParallel as DP self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device]) else:", "= batch[1] batch = labeled_batch inputs, y = batch # last input is", "Hidden dimension for the fine-tune MLP \"\"\" super().__init__() self.z_dim = z_dim self.hidden_dim =", "return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self, trainer: Trainer, pl_module: LightningModule, callback_state: Dict[str,", "or evaluation mode (False). \"\"\" original_mode = module.training try: module.train(mode) yield module finally:", "SSLOnlineEvaluator(Callback): # pragma: no cover \"\"\"Attaches a MLP for fine-tuning using the standard", "None: self.dataset = trainer.datamodule.name def on_pretrain_routine_start(self, trainer: Trainer, pl_module: LightningModule) -> None: #", "Optional[str] = None, ): \"\"\" Args: z_dim: Representation dimension drop_p: Dropout probability hidden_dim:", "model must have 1 attribute model = Model() model.z_dim = ... 
# the", "on_train_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx:", "self._recovered_callback_state = callback_state @contextmanager def set_training(module: nn.Module, mode: bool): \"\"\"Context manager to set", "self, z_dim: int, drop_p: float = 0.2, hidden_dim: Optional[int] = None, num_classes: Optional[int]", "The online evaluator will not sync.\" ) self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state", "nn from torch.nn import functional as F from torch.optim import Optimizer from torchmetrics.functional", "Any]) -> None: self._recovered_callback_state = callback_state @contextmanager def set_training(module: nn.Module, mode: bool): \"\"\"Context", "weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False) def", "Dict[str, Any]) -> None: self._recovered_callback_state = callback_state @contextmanager def set_training(module: nn.Module, mode: bool):", "cpu self.online_evaluator = SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) # switch fo PL", "switch fo PL compatibility reasons accel = ( trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\") else", "hidden_dim self.drop_p = drop_p self.optimizer: Optional[Optimizer] = None self.online_evaluator: Optional[SSLEvaluator] = None self.num_classes:", "of classes in the datamodule dm.name = ... 
# name of the datamodule", "accuracy from pl_bolts.models.self_supervised.evaluator import SSLEvaluator class SSLOnlineEvaluator(Callback): # pragma: no cover \"\"\"Attaches a", "Dropout probability hidden_dim: Hidden dimension for the fine-tune MLP \"\"\" super().__init__() self.z_dim =", "= torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch:", "dataloader_idx: int, ) -> None: train_acc, mlp_loss = self.shared_step(pl_module, batch) # update finetune", "if self.dataset == \"stl10\": labeled_batch = batch[1] batch = labeled_batch inputs, y =", "from typing import Any, Dict, Optional, Sequence, Tuple, Union import torch from pytorch_lightning", "batch # last input is for online eval x = inputs[-1] x =", "mlp_loss = self.shared_step(pl_module, batch) # update finetune weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc,", "DataModule() dm.num_classes = ... 
# the num of classes in the datamodule dm.name", "= drop_p self.optimizer: Optional[Optimizer] = None self.online_evaluator: Optional[SSLEvaluator] = None self.num_classes: Optional[int] =", "the representation dim online_eval = SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\" def __init__( self, z_dim:", "typing import Any, Dict, Optional, Sequence, Tuple, Union import torch from pytorch_lightning import", "dim online_eval = SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\" def __init__( self, z_dim: int, drop_p:", "int, ) -> None: val_acc, mlp_loss = self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True,", "trainer: Trainer, pl_module: LightningModule, stage: Optional[str] = None) -> None: if self.num_classes is", "LightningModule, callback_state: Dict[str, Any]) -> None: self._recovered_callback_state = callback_state @contextmanager def set_training(module: nn.Module,", "= pl_module(x).flatten(start_dim=1) # forward pass mlp_logits = self.online_evaluator(representations) # type: ignore[operator] mlp_loss =", "callback_state @contextmanager def set_training(module: nn.Module, mode: bool): \"\"\"Context manager to set training mode.", "None, num_classes: Optional[int] = None, dataset: Optional[str] = None, ): \"\"\" Args: z_dim:", "drop_p: Dropout probability hidden_dim: Hidden dimension for the fine-tune MLP \"\"\" super().__init__() self.z_dim", "int, dataloader_idx: int, ) -> None: train_acc, mlp_loss = self.shared_step(pl_module, batch) # update", "( trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector ) if accel.is_distributed: if accel.use_ddp: from", "dimension for the fine-tune MLP \"\"\" super().__init__() self.z_dim = z_dim self.hidden_dim = hidden_dim", "None: if self.num_classes is None: self.num_classes = trainer.datamodule.num_classes if self.dataset is None: self.dataset", "MLP for fine-tuning using the standard self-supervised protocol. 
Example:: # your datamodule must", "pass mlp_logits = self.online_evaluator(representations) # type: ignore[operator] mlp_loss = F.cross_entropy(mlp_logits, y) acc =", "hidden_dim: Optional[int] = None, num_classes: Optional[int] = None, dataset: Optional[str] = None, ):", "contextlib import contextmanager from typing import Any, Dict, Optional, Sequence, Tuple, Union import", "input is for online eval x = inputs[-1] x = x.to(device) y =", "self.dataset = trainer.datamodule.name def on_pretrain_routine_start(self, trainer: Trainer, pl_module: LightningModule) -> None: # must", "Optional[Dict[str, Any]] = None def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[str] =", "= hidden_dim self.drop_p = drop_p self.optimizer: Optional[Optimizer] = None self.online_evaluator: Optional[SSLEvaluator] = None", ") self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def", "n_hidden=self.hidden_dim, ).to(pl_module.device) # switch fo PL compatibility reasons accel = ( trainer.accelerator_connector if", "training mode (True) or evaluation mode (False). \"\"\" original_mode = module.training try: module.train(mode)", "class SSLOnlineEvaluator(Callback): # pragma: no cover \"\"\"Attaches a MLP for fine-tuning using the", "pytorch_lightning import Callback, LightningModule, Trainer from pytorch_lightning.utilities import rank_zero_warn from torch import Tensor,", "attribute model = Model() model.z_dim = ... 
# the representation dim online_eval =", "as DDP self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp: from torch.nn.parallel import DataParallel as", "False): x, y = self.to_device(batch, pl_module.device) representations = pl_module(x).flatten(start_dim=1) # forward pass mlp_logits", "dm.name = ... # name of the datamodule (e.g. ImageNet, STL10, CIFAR10) #", "batch: Sequence, device: Union[str, torch.device]) -> Tuple[Tensor, Tensor]: # get the labeled batch", "LightningModule, stage: Optional[str] = None) -> None: if self.num_classes is None: self.num_classes =", "= labeled_batch inputs, y = batch # last input is for online eval", "on_load_checkpoint(self, trainer: Trainer, pl_module: LightningModule, callback_state: Dict[str, Any]) -> None: self._recovered_callback_state = callback_state", "n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) # switch fo PL compatibility reasons accel =", "DP self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn( \"Does not support this type of", "z_dim=model.z_dim ) \"\"\" def __init__( self, z_dim: int, drop_p: float = 0.2, hidden_dim:", "= self.to_device(batch, pl_module.device) representations = pl_module(x).flatten(start_dim=1) # forward pass mlp_logits = self.online_evaluator(representations) #", "training mode. When exit, recover the original training mode. Args: module: module to", "\"Does not support this type of distributed accelerator. 
The online evaluator will not", "= z_dim self.hidden_dim = hidden_dim self.drop_p = drop_p self.optimizer: Optional[Optimizer] = None self.online_evaluator:", "import rank_zero_warn from torch import Tensor, nn from torch.nn import functional as F", "= None def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[str] = None) ->", "on_pretrain_routine_start(self, trainer: Trainer, pl_module: LightningModule) -> None: # must move to device after", "else: rank_zero_warn( \"Does not support this type of distributed accelerator. The online evaluator", "online evaluator will not sync.\" ) self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state is", "torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch: Sequence,", "as F from torch.optim import Optimizer from torchmetrics.functional import accuracy from pl_bolts.models.self_supervised.evaluator import", "for online eval x = inputs[-1] x = x.to(device) y = y.to(device) return", "callback_state: Dict[str, Any]) -> None: self._recovered_callback_state = callback_state @contextmanager def set_training(module: nn.Module, mode:", "(True) or evaluation mode (False). \"\"\" original_mode = module.training try: module.train(mode) yield module", ") -> None: val_acc, mlp_loss = self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True, sync_dist=True)", "labeled batch if self.dataset == \"stl10\": labeled_batch = batch[1] batch = labeled_batch inputs,", "= None self.num_classes: Optional[int] = num_classes self.dataset: Optional[str] = dataset self._recovered_callback_state: Optional[Dict[str, Any]]", "dm.num_classes = ... 
# the num of classes in the datamodule dm.name =", "x.to(device) y = y.to(device) return x, y def shared_step( self, pl_module: LightningModule, batch:", ").to(pl_module.device) # switch fo PL compatibility reasons accel = ( trainer.accelerator_connector if hasattr(trainer,", "set training mode (True) or evaluation mode (False). \"\"\" original_mode = module.training try:", "rank_zero_warn( \"Does not support this type of distributed accelerator. The online evaluator will", "self.optimizer: Optional[Optimizer] = None self.online_evaluator: Optional[SSLEvaluator] = None self.num_classes: Optional[int] = None self.dataset:", "= 0.2, hidden_dim: Optional[int] = None, num_classes: Optional[int] = None, dataset: Optional[str] =", "the fine-tune MLP \"\"\" super().__init__() self.z_dim = z_dim self.hidden_dim = hidden_dim self.drop_p =", "= F.cross_entropy(mlp_logits, y) acc = accuracy(mlp_logits.softmax(-1), y) return acc, mlp_loss def on_train_batch_end( self,", "import DataParallel as DP self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn( \"Does not support", "Any]] = None def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[str] = None)", "int, dataloader_idx: int, ) -> None: val_acc, mlp_loss = self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc,", "None: self._recovered_callback_state = callback_state @contextmanager def set_training(module: nn.Module, mode: bool): \"\"\"Context manager to", "\"\"\"Context manager to set training mode. 
When exit, recover the original training mode.", "during setup, pl_module is still on cpu self.online_evaluator = SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p,", "-> None: val_acc, mlp_loss = self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\",", "self.num_classes: Optional[int] = None self.dataset: Optional[str] = None self.num_classes: Optional[int] = num_classes self.dataset:", "accel.is_distributed: if accel.use_ddp: from torch.nn.parallel import DistributedDataParallel as DDP self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device])", "self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False) def on_validation_batch_end( self, trainer:", "self.dataset: Optional[str] = dataset self._recovered_callback_state: Optional[Dict[str, Any]] = None def setup(self, trainer: Trainer,", "batch_idx: int, dataloader_idx: int, ) -> None: train_acc, mlp_loss = self.shared_step(pl_module, batch) #", "-> None: # must move to device after setup, as during setup, pl_module", "on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False) def on_validation_batch_end( self, trainer: Trainer, pl_module: LightningModule,", "Optional[str] = None) -> None: if self.num_classes is None: self.num_classes = trainer.datamodule.num_classes if", "None: train_acc, mlp_loss = self.shared_step(pl_module, batch) # update finetune weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad()", "self.hidden_dim = hidden_dim self.drop_p = drop_p self.optimizer: Optional[Optimizer] = None self.online_evaluator: Optional[SSLEvaluator] =", "mode: bool): \"\"\"Context manager to set training mode. 
When exit, recover the original", "sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True, sync_dist=True) def on_save_checkpoint(self, trainer: Trainer, pl_module: LightningModule, checkpoint:", "Optional[int] = None self.dataset: Optional[str] = None self.num_classes: Optional[int] = num_classes self.dataset: Optional[str]", "Sequence, device: Union[str, torch.device]) -> Tuple[Tensor, Tensor]: # get the labeled batch if", "self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True, sync_dist=True) def", "Optimizer from torchmetrics.functional import accuracy from pl_bolts.models.self_supervised.evaluator import SSLEvaluator class SSLOnlineEvaluator(Callback): # pragma:", "-> None: self._recovered_callback_state = callback_state @contextmanager def set_training(module: nn.Module, mode: bool): \"\"\"Context manager", "compatibility reasons accel = ( trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector ) if", "original training mode. Args: module: module to set training mode mode: whether to", "y.to(device) return x, y def shared_step( self, pl_module: LightningModule, batch: Sequence, ): with", "# forward pass mlp_logits = self.online_evaluator(representations) # type: ignore[operator] mlp_loss = F.cross_entropy(mlp_logits, y)", "= SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\" def __init__( self, z_dim: int, drop_p: float =", "int, drop_p: float = 0.2, hidden_dim: Optional[int] = None, num_classes: Optional[int] = None,", "hidden_dim: Hidden dimension for the fine-tune MLP \"\"\" super().__init__() self.z_dim = z_dim self.hidden_dim", "datamodule (e.g. ImageNet, STL10, CIFAR10) # your model must have 1 attribute model", "= ... # name of the datamodule (e.g. 
ImageNet, STL10, CIFAR10) # your", "self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int,", "dataloader_idx: int, ) -> None: val_acc, mlp_loss = self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False,", "... # the representation dim online_eval = SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\" def __init__(", "nn.Module, mode: bool): \"\"\"Context manager to set training mode. When exit, recover the", "SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\" def __init__( self, z_dim: int, drop_p: float = 0.2,", "the original training mode. Args: module: module to set training mode mode: whether", "\"stl10\": labeled_batch = batch[1] batch = labeled_batch inputs, y = batch # last", "import accuracy from pl_bolts.models.self_supervised.evaluator import SSLEvaluator class SSLOnlineEvaluator(Callback): # pragma: no cover \"\"\"Attaches", "this type of distributed accelerator. The online evaluator will not sync.\" ) self.optimizer", "y = y.to(device) return x, y def shared_step( self, pl_module: LightningModule, batch: Sequence,", "# switch fo PL compatibility reasons accel = ( trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\")", "the labeled batch if self.dataset == \"stl10\": labeled_batch = batch[1] batch = labeled_batch", "self.online_evaluator = SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) # switch fo PL compatibility", "pl_module: LightningModule, callback_state: Dict[str, Any]) -> None: self._recovered_callback_state = callback_state @contextmanager def set_training(module:", "train_acc, mlp_loss = self.shared_step(pl_module, batch) # update finetune weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\",", "F from torch.optim import Optimizer from torchmetrics.functional import accuracy from 
pl_bolts.models.self_supervised.evaluator import SSLEvaluator", "model = Model() model.z_dim = ... # the representation dim online_eval = SSLOnlineEvaluator(", "dict: return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self, trainer: Trainer, pl_module: LightningModule, callback_state:", "import SSLEvaluator class SSLOnlineEvaluator(Callback): # pragma: no cover \"\"\"Attaches a MLP for fine-tuning", "def set_training(module: nn.Module, mode: bool): \"\"\"Context manager to set training mode. When exit,", "Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int, ) -> None: train_acc, mlp_loss =", "batch = labeled_batch inputs, y = batch # last input is for online", "-> Tuple[Tensor, Tensor]: # get the labeled batch if self.dataset == \"stl10\": labeled_batch", "after setup, as during setup, pl_module is still on cpu self.online_evaluator = SSLEvaluator(", "as DP self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn( \"Does not support this type", "if hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector ) if accel.is_distributed: if accel.use_ddp: from torch.nn.parallel import", "... # the num of classes in the datamodule dm.name = ... 
#", "-> None: train_acc, mlp_loss = self.shared_step(pl_module, batch) # update finetune weights mlp_loss.backward() self.optimizer.step()", "self.optimizer.state_dict()} def on_load_checkpoint(self, trainer: Trainer, pl_module: LightningModule, callback_state: Dict[str, Any]) -> None: self._recovered_callback_state", "ignore[operator] mlp_loss = F.cross_entropy(mlp_logits, y) acc = accuracy(mlp_logits.softmax(-1), y) return acc, mlp_loss def", "None: # must move to device after setup, as during setup, pl_module is", "Trainer, pl_module: LightningModule, callback_state: Dict[str, Any]) -> None: self._recovered_callback_state = callback_state @contextmanager def", "trainer._accelerator_connector ) if accel.is_distributed: if accel.use_ddp: from torch.nn.parallel import DistributedDataParallel as DDP self.online_evaluator", "= self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True, sync_dist=True)", "= DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn( \"Does not support this type of distributed accelerator.", "on_epoch=True, sync_dist=True) def on_save_checkpoint(self, trainer: Trainer, pl_module: LightningModule, checkpoint: Dict[str, Any]) -> dict:", "sync_dist=True) def on_save_checkpoint(self, trainer: Trainer, pl_module: LightningModule, checkpoint: Dict[str, Any]) -> dict: return", "self.dataset == \"stl10\": labeled_batch = batch[1] batch = labeled_batch inputs, y = batch", "Args: module: module to set training mode mode: whether to set training mode", "from torch.nn.parallel import DataParallel as DP self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn( \"Does", "dimension drop_p: Dropout probability hidden_dim: Hidden dimension for the fine-tune MLP \"\"\" super().__init__()", "train_acc, on_step=True, on_epoch=False) 
pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False) def on_validation_batch_end( self, trainer: Trainer, pl_module:", "y = batch # last input is for online eval x = inputs[-1]", "import torch from pytorch_lightning import Callback, LightningModule, Trainer from pytorch_lightning.utilities import rank_zero_warn from", "Dict, Optional, Sequence, Tuple, Union import torch from pytorch_lightning import Callback, LightningModule, Trainer", "self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn( \"Does not support this type of distributed", "attributes dm = DataModule() dm.num_classes = ... # the num of classes in", "a MLP for fine-tuning using the standard self-supervised protocol. Example:: # your datamodule", "on_step=True, on_epoch=False) def on_validation_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence,", "evaluator will not sync.\" ) self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state is not", "functional as F from torch.optim import Optimizer from torchmetrics.functional import accuracy from pl_bolts.models.self_supervised.evaluator", "self._recovered_callback_state is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch: Sequence, device: Union[str, torch.device])", "x, y = self.to_device(batch, pl_module.device) representations = pl_module(x).flatten(start_dim=1) # forward pass mlp_logits =", "the datamodule (e.g. 
ImageNet, STL10, CIFAR10) # your model must have 1 attribute", "Any, Dict, Optional, Sequence, Tuple, Union import torch from pytorch_lightning import Callback, LightningModule,", "on_validation_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx:", "of the datamodule (e.g. ImageNet, STL10, CIFAR10) # your model must have 1", "pl_module.log(\"online_train_acc\", train_acc, on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False) def on_validation_batch_end( self, trainer: Trainer,", "Trainer, pl_module: LightningModule, checkpoint: Dict[str, Any]) -> dict: return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()}", "if accel.use_ddp: from torch.nn.parallel import DistributedDataParallel as DDP self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device]) elif", "inputs, y = batch # last input is for online eval x =", "torch.nn import functional as F from torch.optim import Optimizer from torchmetrics.functional import accuracy", "# the representation dim online_eval = SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\" def __init__( self,", "z_dim: Representation dimension drop_p: Dropout probability hidden_dim: Hidden dimension for the fine-tune MLP", "import contextmanager from typing import Any, Dict, Optional, Sequence, Tuple, Union import torch", "with torch.no_grad(): with set_training(pl_module, False): x, y = self.to_device(batch, pl_module.device) representations = pl_module(x).flatten(start_dim=1)", "classes in the datamodule dm.name = ... 
# name of the datamodule (e.g.", "set_training(pl_module, False): x, y = self.to_device(batch, pl_module.device) representations = pl_module(x).flatten(start_dim=1) # forward pass", "STL10, CIFAR10) # your model must have 1 attribute model = Model() model.z_dim", "Optional, Sequence, Tuple, Union import torch from pytorch_lightning import Callback, LightningModule, Trainer from", "mlp_loss = F.cross_entropy(mlp_logits, y) acc = accuracy(mlp_logits.softmax(-1), y) return acc, mlp_loss def on_train_batch_end(", "setup, pl_module is still on cpu self.online_evaluator = SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim,", ") \"\"\" def __init__( self, z_dim: int, drop_p: float = 0.2, hidden_dim: Optional[int]", "must move to device after setup, as during setup, pl_module is still on", "labeled_batch = batch[1] batch = labeled_batch inputs, y = batch # last input", "ImageNet, STL10, CIFAR10) # your model must have 1 attribute model = Model()", "pl_module.device) representations = pl_module(x).flatten(start_dim=1) # forward pass mlp_logits = self.online_evaluator(representations) # type: ignore[operator]", "... # name of the datamodule (e.g. ImageNet, STL10, CIFAR10) # your model", "must have 1 attribute model = Model() model.z_dim = ... 
# the representation", "val_acc, on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True, sync_dist=True) def on_save_checkpoint(self, trainer: Trainer,", "# must move to device after setup, as during setup, pl_module is still", "not sync.\" ) self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"])", "Any]) -> dict: return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self, trainer: Trainer, pl_module:", "move to device after setup, as during setup, pl_module is still on cpu", "Sequence, Tuple, Union import torch from pytorch_lightning import Callback, LightningModule, Trainer from pytorch_lightning.utilities", "inputs[-1] x = x.to(device) y = y.to(device) return x, y def shared_step( self,", "): \"\"\" Args: z_dim: Representation dimension drop_p: Dropout probability hidden_dim: Hidden dimension for", "# type: ignore[operator] mlp_loss = F.cross_entropy(mlp_logits, y) acc = accuracy(mlp_logits.softmax(-1), y) return acc,", "setup, as during setup, pl_module is still on cpu self.online_evaluator = SSLEvaluator( n_input=self.z_dim,", "representation dim online_eval = SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\" def __init__( self, z_dim: int,", "= DataModule() dm.num_classes = ... 
# the num of classes in the datamodule", "shared_step( self, pl_module: LightningModule, batch: Sequence, ): with torch.no_grad(): with set_training(pl_module, False): x,", "def on_pretrain_routine_start(self, trainer: Trainer, pl_module: LightningModule) -> None: # must move to device", "LightningModule, batch: Sequence, ): with torch.no_grad(): with set_training(pl_module, False): x, y = self.to_device(batch,", "__init__( self, z_dim: int, drop_p: float = 0.2, hidden_dim: Optional[int] = None, num_classes:", "self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self, trainer: Trainer, pl_module: LightningModule, callback_state: Dict[str, Any]) ->", "sync.\" ) self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"])", "not support this type of distributed accelerator. The online evaluator will not sync.\"", "support this type of distributed accelerator. The online evaluator will not sync.\" )", "forward pass mlp_logits = self.online_evaluator(representations) # type: ignore[operator] mlp_loss = F.cross_entropy(mlp_logits, y) acc", "outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int, ) -> None: train_acc, mlp_loss", "mode mode: whether to set training mode (True) or evaluation mode (False). 
\"\"\"", "else trainer._accelerator_connector ) if accel.is_distributed: if accel.use_ddp: from torch.nn.parallel import DistributedDataParallel as DDP", "batch: Sequence, ): with torch.no_grad(): with set_training(pl_module, False): x, y = self.to_device(batch, pl_module.device)", "LightningModule) -> None: # must move to device after setup, as during setup,", "DDP self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp: from torch.nn.parallel import DataParallel as DP", "Tuple[Tensor, Tensor]: # get the labeled batch if self.dataset == \"stl10\": labeled_batch =", "self.num_classes = trainer.datamodule.num_classes if self.dataset is None: self.dataset = trainer.datamodule.name def on_pretrain_routine_start(self, trainer:", "update finetune weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss, on_step=True,", "on_epoch=False) def on_validation_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx:", "mode (True) or evaluation mode (False). \"\"\" original_mode = module.training try: module.train(mode) yield", "CIFAR10) # your model must have 1 attribute model = Model() model.z_dim =", "num_classes: Optional[int] = None, dataset: Optional[str] = None, ): \"\"\" Args: z_dim: Representation", "None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch: Sequence, device: Union[str, torch.device]) -> Tuple[Tensor, Tensor]:", "batch_idx: int, dataloader_idx: int, ) -> None: val_acc, mlp_loss = self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\",", "self-supervised protocol. 
Example:: # your datamodule must have 2 attributes dm = DataModule()", "pragma: no cover \"\"\"Attaches a MLP for fine-tuning using the standard self-supervised protocol.", "val_acc, mlp_loss = self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False,", "# the num of classes in the datamodule dm.name = ... # name", "acc, mlp_loss def on_train_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence,", "Optional[str] = dataset self._recovered_callback_state: Optional[Dict[str, Any]] = None def setup(self, trainer: Trainer, pl_module:", "hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector ) if accel.is_distributed: if accel.use_ddp: from torch.nn.parallel import DistributedDataParallel", "trainer.datamodule.name def on_pretrain_routine_start(self, trainer: Trainer, pl_module: LightningModule) -> None: # must move to", "= None, dataset: Optional[str] = None, ): \"\"\" Args: z_dim: Representation dimension drop_p:", "\"\"\" Args: z_dim: Representation dimension drop_p: Dropout probability hidden_dim: Hidden dimension for the", "trainer.datamodule.num_classes if self.dataset is None: self.dataset = trainer.datamodule.name def on_pretrain_routine_start(self, trainer: Trainer, pl_module:", "accel.use_dp: from torch.nn.parallel import DataParallel as DP self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn(", "checkpoint: Dict[str, Any]) -> dict: return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self, trainer:", "as during setup, pl_module is still on cpu self.online_evaluator = SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes,", "Optional[Optimizer] = None self.online_evaluator: Optional[SSLEvaluator] = None self.num_classes: Optional[int] 
= None self.dataset: Optional[str]", "set_training(module: nn.Module, mode: bool): \"\"\"Context manager to set training mode. When exit, recover", "= inputs[-1] x = x.to(device) y = y.to(device) return x, y def shared_step(", "import Any, Dict, Optional, Sequence, Tuple, Union import torch from pytorch_lightning import Callback,", "contextmanager from typing import Any, Dict, Optional, Sequence, Tuple, Union import torch from", "DataParallel as DP self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn( \"Does not support this", "batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True, sync_dist=True) def on_save_checkpoint(self,", "# your model must have 1 attribute model = Model() model.z_dim = ...", "Args: z_dim: Representation dimension drop_p: Dropout probability hidden_dim: Hidden dimension for the fine-tune", "import Optimizer from torchmetrics.functional import accuracy from pl_bolts.models.self_supervised.evaluator import SSLEvaluator class SSLOnlineEvaluator(Callback): #", "Representation dimension drop_p: Dropout probability hidden_dim: Hidden dimension for the fine-tune MLP \"\"\"", "Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int, ) -> None: val_acc, mlp_loss =", "DistributedDataParallel as DDP self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp: from torch.nn.parallel import DataParallel", "labeled_batch inputs, y = batch # last input is for online eval x", "to set training mode mode: whether to set training mode (True) or evaluation", "the standard self-supervised protocol. 
Example:: # your datamodule must have 2 attributes dm", "= None) -> None: if self.num_classes is None: self.num_classes = trainer.datamodule.num_classes if self.dataset", "trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int, )", "y) acc = accuracy(mlp_logits.softmax(-1), y) return acc, mlp_loss def on_train_batch_end( self, trainer: Trainer,", "mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False) def on_validation_batch_end(", "the num of classes in the datamodule dm.name = ... # name of", "x = x.to(device) y = y.to(device) return x, y def shared_step( self, pl_module:", "for fine-tuning using the standard self-supervised protocol. Example:: # your datamodule must have", "dm = DataModule() dm.num_classes = ... # the num of classes in the", "= Model() model.z_dim = ... 
# the representation dim online_eval = SSLOnlineEvaluator( z_dim=model.z_dim", "mlp_loss = self.shared_step(pl_module, batch) pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True,", "from torchmetrics.functional import accuracy from pl_bolts.models.self_supervised.evaluator import SSLEvaluator class SSLOnlineEvaluator(Callback): # pragma: no", "= None self.online_evaluator: Optional[SSLEvaluator] = None self.num_classes: Optional[int] = None self.dataset: Optional[str] =", "SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) # switch fo PL compatibility reasons accel", "self.shared_step(pl_module, batch) # update finetune weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, on_step=True, on_epoch=False)", "to device after setup, as during setup, pl_module is still on cpu self.online_evaluator", "Optional[str] = None self.num_classes: Optional[int] = num_classes self.dataset: Optional[str] = dataset self._recovered_callback_state: Optional[Dict[str,", "still on cpu self.online_evaluator = SSLEvaluator( n_input=self.z_dim, n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) # switch", "torch.nn.parallel import DistributedDataParallel as DDP self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp: from torch.nn.parallel", "DP(self.online_evaluator, device_ids=[pl_module.device]) else: rank_zero_warn( \"Does not support this type of distributed accelerator. 
The", "self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch: Sequence, device: Union[str, torch.device]) -> Tuple[Tensor, Tensor]: # get", "\"\"\" super().__init__() self.z_dim = z_dim self.hidden_dim = hidden_dim self.drop_p = drop_p self.optimizer: Optional[Optimizer]", "@contextmanager def set_training(module: nn.Module, mode: bool): \"\"\"Context manager to set training mode. When", "Optional[int] = None, num_classes: Optional[int] = None, dataset: Optional[str] = None, ): \"\"\"", "Tensor]: # get the labeled batch if self.dataset == \"stl10\": labeled_batch = batch[1]", "= y.to(device) return x, y def shared_step( self, pl_module: LightningModule, batch: Sequence, ):", "torch import Tensor, nn from torch.nn import functional as F from torch.optim import", "from torch.optim import Optimizer from torchmetrics.functional import accuracy from pl_bolts.models.self_supervised.evaluator import SSLEvaluator class", "on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False) def on_validation_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs:", "acc = accuracy(mlp_logits.softmax(-1), y) return acc, mlp_loss def on_train_batch_end( self, trainer: Trainer, pl_module:", "on_save_checkpoint(self, trainer: Trainer, pl_module: LightningModule, checkpoint: Dict[str, Any]) -> dict: return {\"state_dict\": self.online_evaluator.state_dict(),", "exit, recover the original training mode. Args: module: module to set training mode", "training mode. 
Args: module: module to set training mode mode: whether to set", "= ( trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector ) if accel.is_distributed: if accel.use_ddp:", "trainer: Trainer, pl_module: LightningModule, callback_state: Dict[str, Any]) -> None: self._recovered_callback_state = callback_state @contextmanager", "datamodule must have 2 attributes dm = DataModule() dm.num_classes = ... # the", "is None: self.dataset = trainer.datamodule.name def on_pretrain_routine_start(self, trainer: Trainer, pl_module: LightningModule) -> None:", "from torch.nn import functional as F from torch.optim import Optimizer from torchmetrics.functional import", "self, pl_module: LightningModule, batch: Sequence, ): with torch.no_grad(): with set_training(pl_module, False): x, y", "rank_zero_warn from torch import Tensor, nn from torch.nn import functional as F from", "probability hidden_dim: Hidden dimension for the fine-tune MLP \"\"\" super().__init__() self.z_dim = z_dim", "None self.dataset: Optional[str] = None self.num_classes: Optional[int] = num_classes self.dataset: Optional[str] = dataset", "pytorch_lightning.utilities import rank_zero_warn from torch import Tensor, nn from torch.nn import functional as", "pl_module: LightningModule, checkpoint: Dict[str, Any]) -> dict: return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def", "float = 0.2, hidden_dim: Optional[int] = None, num_classes: Optional[int] = None, dataset: Optional[str]", "trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector ) if accel.is_distributed: if accel.use_ddp: from torch.nn.parallel", "# name of the datamodule (e.g. 
ImageNet, STL10, CIFAR10) # your model must", "Trainer, pl_module: LightningModule, stage: Optional[str] = None) -> None: if self.num_classes is None:", "= accuracy(mlp_logits.softmax(-1), y) return acc, mlp_loss def on_train_batch_end( self, trainer: Trainer, pl_module: LightningModule,", "trainer: Trainer, pl_module: LightningModule) -> None: # must move to device after setup,", "{\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self, trainer: Trainer, pl_module: LightningModule, callback_state: Dict[str, Any])", "torch.optim import Optimizer from torchmetrics.functional import accuracy from pl_bolts.models.self_supervised.evaluator import SSLEvaluator class SSLOnlineEvaluator(Callback):", "= ... # the representation dim online_eval = SSLOnlineEvaluator( z_dim=model.z_dim ) \"\"\" def", "self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp: from torch.nn.parallel import DataParallel as DP self.online_evaluator", "batch[1] batch = labeled_batch inputs, y = batch # last input is for", "mode. When exit, recover the original training mode. 
Args: module: module to set", "x = inputs[-1] x = x.to(device) y = y.to(device) return x, y def", "p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) # switch fo PL compatibility reasons accel = ( trainer.accelerator_connector", "mlp_loss, on_step=False, on_epoch=True, sync_dist=True) def on_save_checkpoint(self, trainer: Trainer, pl_module: LightningModule, checkpoint: Dict[str, Any])", "None def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[str] = None) -> None:", "F.cross_entropy(mlp_logits, y) acc = accuracy(mlp_logits.softmax(-1), y) return acc, mlp_loss def on_train_batch_end( self, trainer:", "on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True, sync_dist=True) def on_save_checkpoint(self, trainer: Trainer, pl_module:", "Trainer, pl_module: LightningModule) -> None: # must move to device after setup, as", "batch: Sequence, batch_idx: int, dataloader_idx: int, ) -> None: train_acc, mlp_loss = self.shared_step(pl_module,", "import Tensor, nn from torch.nn import functional as F from torch.optim import Optimizer", "def on_load_checkpoint(self, trainer: Trainer, pl_module: LightningModule, callback_state: Dict[str, Any]) -> None: self._recovered_callback_state =", "= callback_state @contextmanager def set_training(module: nn.Module, mode: bool): \"\"\"Context manager to set training", "Sequence, ): with torch.no_grad(): with set_training(pl_module, False): x, y = self.to_device(batch, pl_module.device) representations", "\"\"\" def __init__( self, z_dim: int, drop_p: float = 0.2, hidden_dim: Optional[int] =", "module to set training mode mode: whether to set training mode (True) or", "DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp: from torch.nn.parallel import DataParallel as DP self.online_evaluator = DP(self.online_evaluator,", "\"accelerator_connector\") else trainer._accelerator_connector ) if accel.is_distributed: if 
accel.use_ddp: from torch.nn.parallel import DistributedDataParallel as", "using the standard self-supervised protocol. Example:: # your datamodule must have 2 attributes", "return x, y def shared_step( self, pl_module: LightningModule, batch: Sequence, ): with torch.no_grad():", "trainer: Trainer, pl_module: LightningModule, checkpoint: Dict[str, Any]) -> dict: return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\":", "datamodule dm.name = ... # name of the datamodule (e.g. ImageNet, STL10, CIFAR10)", "if self.num_classes is None: self.num_classes = trainer.datamodule.num_classes if self.dataset is None: self.dataset =", "cover \"\"\"Attaches a MLP for fine-tuning using the standard self-supervised protocol. Example:: #", "(e.g. ImageNet, STL10, CIFAR10) # your model must have 1 attribute model =", "n_classes=self.num_classes, p=self.drop_p, n_hidden=self.hidden_dim, ).to(pl_module.device) # switch fo PL compatibility reasons accel = (", "pl_module: LightningModule, batch: Sequence, ): with torch.no_grad(): with set_training(pl_module, False): x, y =", "import Callback, LightningModule, Trainer from pytorch_lightning.utilities import rank_zero_warn from torch import Tensor, nn", "LightningModule, Trainer from pytorch_lightning.utilities import rank_zero_warn from torch import Tensor, nn from torch.nn", "# pragma: no cover \"\"\"Attaches a MLP for fine-tuning using the standard self-supervised", "self.online_evaluator: Optional[SSLEvaluator] = None self.num_classes: Optional[int] = None self.dataset: Optional[str] = None self.num_classes:", "== \"stl10\": labeled_batch = batch[1] batch = labeled_batch inputs, y = batch #", "pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False) def on_validation_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence,", "finetune weights mlp_loss.backward() self.optimizer.step() self.optimizer.zero_grad() pl_module.log(\"online_train_acc\", train_acc, 
on_step=True, on_epoch=False) pl_module.log(\"online_train_loss\", mlp_loss, on_step=True, on_epoch=False)", "Optional[SSLEvaluator] = None self.num_classes: Optional[int] = None self.dataset: Optional[str] = None self.num_classes: Optional[int]", "of distributed accelerator. The online evaluator will not sync.\" ) self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(),", "\"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self, trainer: Trainer, pl_module: LightningModule, callback_state: Dict[str, Any]) -> None:", "= DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp: from torch.nn.parallel import DataParallel as DP self.online_evaluator =", "if self._recovered_callback_state is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch: Sequence, device: Union[str,", "accel.use_ddp: from torch.nn.parallel import DistributedDataParallel as DDP self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp:", "= dataset self._recovered_callback_state: Optional[Dict[str, Any]] = None def setup(self, trainer: Trainer, pl_module: LightningModule,", "pl_bolts.models.self_supervised.evaluator import SSLEvaluator class SSLOnlineEvaluator(Callback): # pragma: no cover \"\"\"Attaches a MLP for", "Union import torch from pytorch_lightning import Callback, LightningModule, Trainer from pytorch_lightning.utilities import rank_zero_warn", "online eval x = inputs[-1] x = x.to(device) y = y.to(device) return x,", "protocol. Example:: # your datamodule must have 2 attributes dm = DataModule() dm.num_classes", "have 1 attribute model = Model() model.z_dim = ... # the representation dim", "standard self-supervised protocol. 
Example:: # your datamodule must have 2 attributes dm =", "Sequence, batch_idx: int, dataloader_idx: int, ) -> None: val_acc, mlp_loss = self.shared_step(pl_module, batch)", "device_ids=[pl_module.device]) elif accel.use_dp: from torch.nn.parallel import DataParallel as DP self.online_evaluator = DP(self.online_evaluator, device_ids=[pl_module.device])", "= ... # the num of classes in the datamodule dm.name = ...", "Trainer from pytorch_lightning.utilities import rank_zero_warn from torch import Tensor, nn from torch.nn import", "is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch: Sequence, device: Union[str, torch.device]) ->", "reasons accel = ( trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector ) if accel.is_distributed:", "None) -> None: if self.num_classes is None: self.num_classes = trainer.datamodule.num_classes if self.dataset is", "Sequence, batch_idx: int, dataloader_idx: int, ) -> None: train_acc, mlp_loss = self.shared_step(pl_module, batch)", "accuracy(mlp_logits.softmax(-1), y) return acc, mlp_loss def on_train_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs:", "Model() model.z_dim = ... # the representation dim online_eval = SSLOnlineEvaluator( z_dim=model.z_dim )", "is None: self.num_classes = trainer.datamodule.num_classes if self.dataset is None: self.dataset = trainer.datamodule.name def", "type of distributed accelerator. The online evaluator will not sync.\" ) self.optimizer =", "2 attributes dm = DataModule() dm.num_classes = ... 
# the num of classes", "None: self.num_classes = trainer.datamodule.num_classes if self.dataset is None: self.dataset = trainer.datamodule.name def on_pretrain_routine_start(self,", "super().__init__() self.z_dim = z_dim self.hidden_dim = hidden_dim self.drop_p = drop_p self.optimizer: Optional[Optimizer] =", "batch if self.dataset == \"stl10\": labeled_batch = batch[1] batch = labeled_batch inputs, y", "mlp_loss, on_step=True, on_epoch=False) def on_validation_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch:", "x, y def shared_step( self, pl_module: LightningModule, batch: Sequence, ): with torch.no_grad(): with", "pl_module.log(\"online_val_acc\", val_acc, on_step=False, on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True, sync_dist=True) def on_save_checkpoint(self, trainer:", "\"\"\"Attaches a MLP for fine-tuning using the standard self-supervised protocol. Example:: # your", "outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int, ) -> None: val_acc, mlp_loss", "y = self.to_device(batch, pl_module.device) representations = pl_module(x).flatten(start_dim=1) # forward pass mlp_logits = self.online_evaluator(representations)", "def on_save_checkpoint(self, trainer: Trainer, pl_module: LightningModule, checkpoint: Dict[str, Any]) -> dict: return {\"state_dict\":", "num of classes in the datamodule dm.name = ... # name of the", "self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch: Sequence, device: Union[str, torch.device]) -> Tuple[Tensor, Tensor]: #", "manager to set training mode. When exit, recover the original training mode. Args:", "device_ids=[pl_module.device]) else: rank_zero_warn( \"Does not support this type of distributed accelerator. 
The online", "self._recovered_callback_state: Optional[Dict[str, Any]] = None def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[str]", "device after setup, as during setup, pl_module is still on cpu self.online_evaluator =", "y def shared_step( self, pl_module: LightningModule, batch: Sequence, ): with torch.no_grad(): with set_training(pl_module,", "self.dataset: Optional[str] = None self.num_classes: Optional[int] = num_classes self.dataset: Optional[str] = dataset self._recovered_callback_state:", ") if accel.is_distributed: if accel.use_ddp: from torch.nn.parallel import DistributedDataParallel as DDP self.online_evaluator =", "None self.num_classes: Optional[int] = num_classes self.dataset: Optional[str] = dataset self._recovered_callback_state: Optional[Dict[str, Any]] =", "-> None: if self.num_classes is None: self.num_classes = trainer.datamodule.num_classes if self.dataset is None:", "Tuple, Union import torch from pytorch_lightning import Callback, LightningModule, Trainer from pytorch_lightning.utilities import", "pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int, ) -> None:", "When exit, recover the original training mode. Args: module: module to set training", "set training mode. When exit, recover the original training mode. Args: module: module", "Callback, LightningModule, Trainer from pytorch_lightning.utilities import rank_zero_warn from torch import Tensor, nn from", "evaluation mode (False). 
\"\"\" original_mode = module.training try: module.train(mode) yield module finally: module.train(original_mode)", "from torch.nn.parallel import DistributedDataParallel as DDP self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp: from", "self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state is not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self,", "device: Union[str, torch.device]) -> Tuple[Tensor, Tensor]: # get the labeled batch if self.dataset", "dataset self._recovered_callback_state: Optional[Dict[str, Any]] = None def setup(self, trainer: Trainer, pl_module: LightningModule, stage:", "= batch # last input is for online eval x = inputs[-1] x", "= trainer.datamodule.num_classes if self.dataset is None: self.dataset = trainer.datamodule.name def on_pretrain_routine_start(self, trainer: Trainer,", "if accel.is_distributed: if accel.use_ddp: from torch.nn.parallel import DistributedDataParallel as DDP self.online_evaluator = DDP(self.online_evaluator,", "fine-tuning using the standard self-supervised protocol. Example:: # your datamodule must have 2", "get the labeled batch if self.dataset == \"stl10\": labeled_batch = batch[1] batch =", "will not sync.\" ) self.optimizer = torch.optim.Adam(self.online_evaluator.parameters(), lr=1e-4) if self._recovered_callback_state is not None:", "def on_validation_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx: int,", "to set training mode (True) or evaluation mode (False). 
\"\"\" original_mode = module.training", "torch.device]) -> Tuple[Tensor, Tensor]: # get the labeled batch if self.dataset == \"stl10\":", "Optional[int] = None, dataset: Optional[str] = None, ): \"\"\" Args: z_dim: Representation dimension", "None self.online_evaluator: Optional[SSLEvaluator] = None self.num_classes: Optional[int] = None self.dataset: Optional[str] = None", "LightningModule, checkpoint: Dict[str, Any]) -> dict: return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self,", "def __init__( self, z_dim: int, drop_p: float = 0.2, hidden_dim: Optional[int] = None,", "LightningModule, outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int, ) -> None: val_acc,", "self.online_evaluator(representations) # type: ignore[operator] mlp_loss = F.cross_entropy(mlp_logits, y) acc = accuracy(mlp_logits.softmax(-1), y) return", "torch.no_grad(): with set_training(pl_module, False): x, y = self.to_device(batch, pl_module.device) representations = pl_module(x).flatten(start_dim=1) #", "name of the datamodule (e.g. ImageNet, STL10, CIFAR10) # your model must have", "the datamodule dm.name = ... # name of the datamodule (e.g. ImageNet, STL10,", "your datamodule must have 2 attributes dm = DataModule() dm.num_classes = ... #", "to_device(self, batch: Sequence, device: Union[str, torch.device]) -> Tuple[Tensor, Tensor]: # get the labeled", "whether to set training mode (True) or evaluation mode (False). 
\"\"\" original_mode =", "module: module to set training mode mode: whether to set training mode (True)", "setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[str] = None) -> None: if self.num_classes", "-> dict: return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self, trainer: Trainer, pl_module: LightningModule,", "= self.online_evaluator(representations) # type: ignore[operator] mlp_loss = F.cross_entropy(mlp_logits, y) acc = accuracy(mlp_logits.softmax(-1), y)", "= trainer.datamodule.name def on_pretrain_routine_start(self, trainer: Trainer, pl_module: LightningModule) -> None: # must move", "not None: self.online_evaluator.load_state_dict(self._recovered_callback_state[\"state_dict\"]) self.optimizer.load_state_dict(self._recovered_callback_state[\"optimizer_state\"]) def to_device(self, batch: Sequence, device: Union[str, torch.device]) -> Tuple[Tensor,", "def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[str] = None) -> None: if", "in the datamodule dm.name = ... # name of the datamodule (e.g. 
ImageNet,", "accel = ( trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector ) if accel.is_distributed: if", "0.2, hidden_dim: Optional[int] = None, num_classes: Optional[int] = None, dataset: Optional[str] = None,", "on_epoch=True, sync_dist=True) pl_module.log(\"online_val_loss\", mlp_loss, on_step=False, on_epoch=True, sync_dist=True) def on_save_checkpoint(self, trainer: Trainer, pl_module: LightningModule,", "self.num_classes is None: self.num_classes = trainer.datamodule.num_classes if self.dataset is None: self.dataset = trainer.datamodule.name", "batch: Sequence, batch_idx: int, dataloader_idx: int, ) -> None: val_acc, mlp_loss = self.shared_step(pl_module,", "PL compatibility reasons accel = ( trainer.accelerator_connector if hasattr(trainer, \"accelerator_connector\") else trainer._accelerator_connector )", "# get the labeled batch if self.dataset == \"stl10\": labeled_batch = batch[1] batch", "= None, ): \"\"\" Args: z_dim: Representation dimension drop_p: Dropout probability hidden_dim: Hidden", "if self.dataset is None: self.dataset = trainer.datamodule.name def on_pretrain_routine_start(self, trainer: Trainer, pl_module: LightningModule)", "<filename>pl_bolts/callbacks/ssl_online.py from contextlib import contextmanager from typing import Any, Dict, Optional, Sequence, Tuple,", "= None self.num_classes: Optional[int] = None self.dataset: Optional[str] = None self.num_classes: Optional[int] =", "torchmetrics.functional import accuracy from pl_bolts.models.self_supervised.evaluator import SSLEvaluator class SSLOnlineEvaluator(Callback): # pragma: no cover", "from contextlib import contextmanager from typing import Any, Dict, Optional, Sequence, Tuple, Union", "None, dataset: Optional[str] = None, ): \"\"\" Args: z_dim: Representation dimension drop_p: Dropout", "for the fine-tune MLP \"\"\" super().__init__() self.z_dim = z_dim self.hidden_dim = hidden_dim self.drop_p", "LightningModule, 
outputs: Sequence, batch: Sequence, batch_idx: int, dataloader_idx: int, ) -> None: train_acc,", "import DistributedDataParallel as DDP self.online_evaluator = DDP(self.online_evaluator, device_ids=[pl_module.device]) elif accel.use_dp: from torch.nn.parallel import", "pl_module: LightningModule) -> None: # must move to device after setup, as during", "drop_p self.optimizer: Optional[Optimizer] = None self.online_evaluator: Optional[SSLEvaluator] = None self.num_classes: Optional[int] = None", "mlp_loss def on_train_batch_end( self, trainer: Trainer, pl_module: LightningModule, outputs: Sequence, batch: Sequence, batch_idx:", "with set_training(pl_module, False): x, y = self.to_device(batch, pl_module.device) representations = pl_module(x).flatten(start_dim=1) # forward", "Dict[str, Any]) -> dict: return {\"state_dict\": self.online_evaluator.state_dict(), \"optimizer_state\": self.optimizer.state_dict()} def on_load_checkpoint(self, trainer: Trainer,", "pl_module: LightningModule, stage: Optional[str] = None) -> None: if self.num_classes is None: self.num_classes", "from pytorch_lightning.utilities import rank_zero_warn from torch import Tensor, nn from torch.nn import functional", "must have 2 attributes dm = DataModule() dm.num_classes = ... # the num", "self.dataset is None: self.dataset = trainer.datamodule.name def on_pretrain_routine_start(self, trainer: Trainer, pl_module: LightningModule) ->", "None self.num_classes: Optional[int] = None self.dataset: Optional[str] = None self.num_classes: Optional[int] = num_classes", "self.num_classes: Optional[int] = num_classes self.dataset: Optional[str] = dataset self._recovered_callback_state: Optional[Dict[str, Any]] = None" ]
[]
[ "(c) 2018 <NAME> # Copyright (c) 2018 <NAME> # # Distributed under the", "phylanx import Phylanx import numpy as np @Phylanx def foo(): local_a = np.array((2,", "(c) 2018 <NAME> # # Distributed under the Boost Software License, Version 1.0.", "as np @Phylanx def foo(): local_a = np.array((2, 1)) local_a[0] += 55 return", "# Copyright (c) 2018 <NAME> # # Distributed under the Boost Software License,", "numpy as np @Phylanx def foo(): local_a = np.array((2, 1)) local_a[0] += 55", "Version 1.0. (See accompanying # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) from phylanx", "LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) from phylanx import Phylanx import numpy as np", "= np.array((2, 1)) local_a[0] += 55 return local_a assert (np.array((57, 1)) == foo()).any()", "at http://www.boost.org/LICENSE_1_0.txt) from phylanx import Phylanx import numpy as np @Phylanx def foo():", "2018 <NAME> # # Distributed under the Boost Software License, Version 1.0. (See", "<NAME> # Copyright (c) 2018 <NAME> # # Distributed under the Boost Software", "2018 <NAME> # Copyright (c) 2018 <NAME> # # Distributed under the Boost", "the Boost Software License, Version 1.0. 
(See accompanying # file LICENSE_1_0.txt or copy", "np @Phylanx def foo(): local_a = np.array((2, 1)) local_a[0] += 55 return local_a", "copy at http://www.boost.org/LICENSE_1_0.txt) from phylanx import Phylanx import numpy as np @Phylanx def", "# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) from phylanx import Phylanx import numpy", "@Phylanx def foo(): local_a = np.array((2, 1)) local_a[0] += 55 return local_a assert", "file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) from phylanx import Phylanx import numpy as", "from phylanx import Phylanx import numpy as np @Phylanx def foo(): local_a =", "(See accompanying # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) from phylanx import Phylanx", "or copy at http://www.boost.org/LICENSE_1_0.txt) from phylanx import Phylanx import numpy as np @Phylanx", "1.0. (See accompanying # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) from phylanx import", "import numpy as np @Phylanx def foo(): local_a = np.array((2, 1)) local_a[0] +=", "def foo(): local_a = np.array((2, 1)) local_a[0] += 55 return local_a assert (np.array((57,", "Copyright (c) 2018 <NAME> # # Distributed under the Boost Software License, Version", "import Phylanx import numpy as np @Phylanx def foo(): local_a = np.array((2, 1))", "foo(): local_a = np.array((2, 1)) local_a[0] += 55 return local_a assert (np.array((57, 1))", "accompanying # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) from phylanx import Phylanx import", "# Copyright (c) 2018 <NAME> # Copyright (c) 2018 <NAME> # # Distributed", "Distributed under the Boost Software License, Version 1.0. (See accompanying # file LICENSE_1_0.txt", "Boost Software License, Version 1.0. (See accompanying # file LICENSE_1_0.txt or copy at", "under the Boost Software License, Version 1.0. (See accompanying # file LICENSE_1_0.txt or", "# Distributed under the Boost Software License, Version 1.0. 
(See accompanying # file", "<NAME> # # Distributed under the Boost Software License, Version 1.0. (See accompanying", "License, Version 1.0. (See accompanying # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) from", "http://www.boost.org/LICENSE_1_0.txt) from phylanx import Phylanx import numpy as np @Phylanx def foo(): local_a", "Software License, Version 1.0. (See accompanying # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)", "Phylanx import numpy as np @Phylanx def foo(): local_a = np.array((2, 1)) local_a[0]", "Copyright (c) 2018 <NAME> # Copyright (c) 2018 <NAME> # # Distributed under", "# # Distributed under the Boost Software License, Version 1.0. (See accompanying #", "local_a = np.array((2, 1)) local_a[0] += 55 return local_a assert (np.array((57, 1)) ==" ]
[ "except: raise Exception('Fail to load module %s' % modname) @pytest.fixture(scope=\"session\") def utils(): return", "= test_settings yield ixload_settings def getTestClass(*args, **kwargs): if test_type: modname = test_type.lower() +", "raise Exception('Fail to load module %s' % modname) @pytest.fixture(scope=\"session\") def utils(): return util", "if test_type: modname = test_type.lower() + \".\" + test_type.lower() else: raise Exception('Fail to", "modname = test_type.lower() + \".\" + test_type.lower() else: raise Exception('Fail to load module", "test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion = \"9.20.0.279\" slot1", "slot1 = tg['tgen'][0][1] port1 = tg['tgen'][0][2] slot2 = tg['tgen'][1][1] port2 = tg['tgen'][1][2] test_settings.portListPerCommunity", "port list ] } \"Traffic1@Network1\": [(1, slot1, port1)], \"Traffic2@Network2\": [(1, slot2, port2)] }", "tg['tgen'][0][1] port1 = tg['tgen'][0][2] slot2 = tg['tgen'][1][1] port2 = tg['tgen'][1][2] test_settings.portListPerCommunity = {", "TB[\"CR\"] = CR return TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if", "'dpu': tb } # Helper Functions def create_test_settings(): # TEST CONFIG test_settings =", "as IxLoadUtils from ixload import IxRestUtils as IxRestUtils from ixnetwork_restpy import SessionAssistant from", "import CREDENTIALS as CR from testbed import TESTBED as TB TB[\"CR\"] = CR", "import IxLoadTestSettings as TestSettings from ixload import IxLoadUtils as IxLoadUtils from ixload import", "\"\"\"Create and return testbed information\"\"\" from credentials import CREDENTIALS as CR from testbed", "fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname = test_type.lower() + \".\" + test_type.lower()", 
"create_session(test_settings): connection = IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return connection test_settings =", "targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request): \"\"\"Create and return testbed information\"\"\" from credentials import CREDENTIALS", "from ixnetwork_restpy.testplatform.testplatform import TestPlatform targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def", "as util from ixload import IxLoadTestSettings as TestSettings from ixload import IxLoadUtils as", "Helper Functions def create_test_settings(): # TEST CONFIG test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr']", "return test_settings def create_session(test_settings): connection = IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return", "CONFIG test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion = \"9.20.0.279\"", "return connection test_settings = create_test_settings() connection = create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection #ixload_settings['session_url']", "cls(**tbinfo) except: raise Exception('Fail to load module %s' % modname) @pytest.fixture(scope=\"session\") def utils():", "#test_settings.chassisList = [\"10.36.79.165\"] return test_settings def create_session(test_settings): connection = IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect,", "return testbed information\"\"\" from 
credentials import CREDENTIALS as CR from testbed import TESTBED", "to load module %s' % modname) @pytest.fixture(scope=\"session\") def utils(): return util @pytest.fixture def", "ixnetwork_restpy import SessionAssistant from ixnetwork_restpy.testplatform.testplatform import TestPlatform targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0,", "tg['tgen'][1][1] port2 = tg['tgen'][1][2] test_settings.portListPerCommunity = { # format: { community name :", "'chassis_list': tb['server'], 'tgen': tb['tgen'], 'vxlan': tb['vxlan'], 'dpu': tb } # Helper Functions def", "as IxRestUtils from ixnetwork_restpy import SessionAssistant from ixnetwork_restpy.testplatform.testplatform import TestPlatform targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),", "port1 = tg['tgen'][0][2] slot2 = tg['tgen'][1][1] port2 = tg['tgen'][1][2] test_settings.portListPerCommunity = { #", "\"Test\") return cls(*args, **kwargs) except: raise Exception('Fail to load module %s' % modname)", "raise Exception('Fail to load module %s' % modname) try: imod = importlib.import_module(modname) cls", "slot2, port2)] } chassisList = tg['tgen'][0][0] test_settings.chassisList = [chassisList] #test_settings.chassisList = [\"10.36.79.165\"] return", "TB TB[\"CR\"] = CR return TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type']", "create_ixload_session_url(tbinfo): ixload_settings = {} tb = tbinfo['stateful'][0] tg = { 'chassis_list': tb['server'], 'tgen':", "modname) @pytest.fixture(scope=\"session\") def utils(): return util @pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings = {} tb", "= { 'chassis_list': tb['server'], 'tgen': tb['tgen'], 'vxlan': tb['vxlan'], 'dpu': tb } # Helper", "tb['server'], 'tgen': tb['tgen'], 'vxlan': tb['vxlan'], 'dpu': tb } # Helper Functions def create_test_settings():", "tb['tgen'], 'vxlan': 
tb['vxlan'], 'dpu': tb } # Helper Functions def create_test_settings(): # TEST", "TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion = \"9.20.0.279\" slot1 = tg['tgen'][0][1]", "= tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion = \"9.20.0.279\" slot1 = tg['tgen'][0][1] port1 =", "import json import os import sys from pprint import pprint as pp import", "testbed import TESTBED as TB TB[\"CR\"] = CR return TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def", "return TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname =", "pp import pytest import utils as util from ixload import IxLoadTestSettings as TestSettings", "= tg['tgen'][0][0] test_settings.chassisList = [chassisList] #test_settings.chassisList = [\"10.36.79.165\"] return test_settings def create_session(test_settings): connection", "SessionAssistant from ixnetwork_restpy.testplatform.testplatform import TestPlatform targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\")", "TEST CONFIG test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion =", "tg['tgen'][0][0] test_settings.chassisList = [chassisList] #test_settings.chassisList = [\"10.36.79.165\"] return test_settings def create_session(test_settings): connection =", "= {} tb = tbinfo['stateful'][0] tg = { 'chassis_list': tb['server'], 'tgen': tb['tgen'], 'vxlan':", "def create_test_settings(): # TEST CONFIG test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] 
test_settings.apiVersion =", "importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return cls(*args, **kwargs) except: raise Exception('Fail", "CR from testbed import TESTBED as TB TB[\"CR\"] = CR return TB @pytest.fixture(name=\"smartnics\",", "TESTBED as TB TB[\"CR\"] = CR return TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo): test_type", "= tg['tgen'][1][2] test_settings.portListPerCommunity = { # format: { community name : [ port", "ixload_settings['test_settings'] = test_settings yield ixload_settings def getTestClass(*args, **kwargs): if test_type: modname = test_type.lower()", "import IxLoadUtils as IxLoadUtils from ixload import IxRestUtils as IxRestUtils from ixnetwork_restpy import", "IxLoadTestSettings as TestSettings from ixload import IxLoadUtils as IxLoadUtils from ixload import IxRestUtils", "TestPlatform targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request): \"\"\"Create and", "ixload_settings = {} tb = tbinfo['stateful'][0] tg = { 'chassis_list': tb['server'], 'tgen': tb['tgen'],", "TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname = test_type.lower()", "%s' % modname) try: imod = importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\")", "json import os import sys from pprint import pprint as pp import pytest", "= TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion = \"9.20.0.279\" slot1 =", "from ixload import IxRestUtils as IxRestUtils from ixnetwork_restpy import SessionAssistant from ixnetwork_restpy.testplatform.testplatform import", "name : [ port list ] } \"Traffic1@Network1\": [(1, slot1, 
port1)], \"Traffic2@Network2\": [(1,", "test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return connection test_settings = create_test_settings() connection = create_session(test_settings)", "slot2 = tg['tgen'][1][1] port2 = tg['tgen'][1][2] test_settings.portListPerCommunity = { # format: { community", "scope=\"session\") def fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname = test_type.lower() + \".\"", "else: raise Exception('Fail to load module %s' % modname) try: imod = importlib.import_module(modname)", "def utils(): return util @pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings = {} tb = tbinfo['stateful'][0]", "test_settings.portListPerCommunity = { # format: { community name : [ port list ]", "{ 'chassis_list': tb['server'], 'tgen': tb['tgen'], 'vxlan': tb['vxlan'], 'dpu': tb } # Helper Functions", "} \"Traffic1@Network1\": [(1, slot1, port1)], \"Traffic2@Network2\": [(1, slot2, port2)] } chassisList = tg['tgen'][0][0]", "ixload import IxLoadUtils as IxLoadUtils from ixload import IxRestUtils as IxRestUtils from ixnetwork_restpy", "= session_url ixload_settings['test_settings'] = test_settings yield ixload_settings def getTestClass(*args, **kwargs): if test_type: modname", "+ \"Test\") return cls(**tbinfo) except: raise Exception('Fail to load module %s' % modname)", "%s' % modname) @pytest.fixture(scope=\"session\") def utils(): return util @pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings =", "= \"v0\" test_settings.ixLoadVersion = \"9.20.0.279\" slot1 = tg['tgen'][0][1] port1 = tg['tgen'][0][2] slot2 =", "utils as util from ixload import IxLoadTestSettings as TestSettings from ixload import IxLoadUtils", "#ixload_settings['session_url'] = session_url ixload_settings['test_settings'] = test_settings yield ixload_settings def getTestClass(*args, **kwargs): if test_type:", 
"\"9.20.0.279\" slot1 = tg['tgen'][0][1] port1 = tg['tgen'][0][2] slot2 = tg['tgen'][1][1] port2 = tg['tgen'][1][2]", "= CR return TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if test_type:", "importlib import json import os import sys from pprint import pprint as pp", "ixload_settings def getTestClass(*args, **kwargs): if test_type: modname = test_type.lower() + \".\" + test_type.lower()", "= getattr(imod, test_type.title() + \"Test\") return cls(*args, **kwargs) except: raise Exception('Fail to load", "information\"\"\" from credentials import CREDENTIALS as CR from testbed import TESTBED as TB", "to load module %s' % modname) try: imod = importlib.import_module(modname) cls = getattr(imod,", "tb['vxlan'], 'dpu': tb } # Helper Functions def create_test_settings(): # TEST CONFIG test_settings", "targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request): \"\"\"Create and return", "as CR from testbed import TESTBED as TB TB[\"CR\"] = CR return TB", "# TEST CONFIG test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion", "test_settings.ixLoadVersion = \"9.20.0.279\" slot1 = tg['tgen'][0][1] port1 = tg['tgen'][0][2] slot2 = tg['tgen'][1][1] port2", "= tg['tgen'][0][2] slot2 = tg['tgen'][1][1] port2 = tg['tgen'][1][2] test_settings.portListPerCommunity = { # format:", "[(1, slot2, port2)] } chassisList = tg['tgen'][0][0] test_settings.chassisList = [chassisList] #test_settings.chassisList = [\"10.36.79.165\"]", "import TESTBED as TB TB[\"CR\"] = CR return TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo):", "community name : [ port list ] } \"Traffic1@Network1\": [(1, slot1, port1)], 
\"Traffic2@Network2\":", "ixload import IxRestUtils as IxRestUtils from ixnetwork_restpy import SessionAssistant from ixnetwork_restpy.testplatform.testplatform import TestPlatform", "util @pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings = {} tb = tbinfo['stateful'][0] tg = {", ": [ port list ] } \"Traffic1@Network1\": [(1, slot1, port1)], \"Traffic2@Network2\": [(1, slot2,", "\"..\", \"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request): \"\"\"Create and return testbed information\"\"\" from", "port2)] } chassisList = tg['tgen'][0][0] test_settings.chassisList = [chassisList] #test_settings.chassisList = [\"10.36.79.165\"] return test_settings", "def fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname = test_type.lower() + \".\" +", "sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request): \"\"\"Create and return testbed information\"\"\" from credentials import", "% modname) @pytest.fixture(scope=\"session\") def utils(): return util @pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings = {}", "@pytest.fixture(scope=\"session\") def utils(): return util @pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings = {} tb =", "] } \"Traffic1@Network1\": [(1, slot1, port1)], \"Traffic2@Network2\": [(1, slot2, port2)] } chassisList =", "[chassisList] #test_settings.chassisList = [\"10.36.79.165\"] return test_settings def create_session(test_settings): connection = IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort,", "test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion = \"9.20.0.279\" slot1 = tg['tgen'][0][1] port1 = tg['tgen'][0][2] slot2", "format: { community name : [ port list ] } \"Traffic1@Network1\": [(1, slot1,", "\"Traffic1@Network1\": [(1, slot1, port1)], \"Traffic2@Network2\": [(1, slot2, port2)] } chassisList = tg['tgen'][0][0] 
test_settings.chassisList", "{ community name : [ port list ] } \"Traffic1@Network1\": [(1, slot1, port1)],", "{} tb = tbinfo['stateful'][0] tg = { 'chassis_list': tb['server'], 'tgen': tb['tgen'], 'vxlan': tb['vxlan'],", "create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection #ixload_settings['session_url'] = session_url ixload_settings['test_settings'] = test_settings yield ixload_settings", "import TestPlatform targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request): \"\"\"Create", "# Helper Functions def create_test_settings(): # TEST CONFIG test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer =", "test_settings yield ixload_settings def getTestClass(*args, **kwargs): if test_type: modname = test_type.lower() + \".\"", "connection #ixload_settings['session_url'] = session_url ixload_settings['test_settings'] = test_settings yield ixload_settings def getTestClass(*args, **kwargs): if", "tbinfo['stateful'][0] tg = { 'chassis_list': tb['server'], 'tgen': tb['tgen'], 'vxlan': tb['vxlan'], 'dpu': tb }", "modname) try: imod = importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return cls(*args,", "credentials import CREDENTIALS as CR from testbed import TESTBED as TB TB[\"CR\"] =", "tg['tgen'][0][2] slot2 = tg['tgen'][1][1] port2 = tg['tgen'][1][2] test_settings.portListPerCommunity = { # format: {", "version=test_settings.apiVersion ) return connection test_settings = create_test_settings() connection = create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] =", "import pprint as pp import pytest import utils as util from ixload import", "IxRestUtils as IxRestUtils from ixnetwork_restpy import SessionAssistant from ixnetwork_restpy.testplatform.testplatform import TestPlatform 
targets_dir =", "'tgen': tb['tgen'], 'vxlan': tb['vxlan'], 'dpu': tb } # Helper Functions def create_test_settings(): #", "[(1, slot1, port1)], \"Traffic2@Network2\": [(1, slot2, port2)] } chassisList = tg['tgen'][0][0] test_settings.chassisList =", "**kwargs): if test_type: modname = test_type.lower() + \".\" + test_type.lower() else: raise Exception('Fail", "def create_session(test_settings): connection = IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return connection test_settings", "@pytest.fixture(scope=\"session\") def tbinfo(request): \"\"\"Create and return testbed information\"\"\" from credentials import CREDENTIALS as", "tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion = \"9.20.0.279\" slot1 = tg['tgen'][0][1] port1 = tg['tgen'][0][2]", ") return connection test_settings = create_test_settings() connection = create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection", "CR return TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname", "= { # format: { community name : [ port list ] }", "IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return connection test_settings = create_test_settings() connection =", "test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return connection test_settings = create_test_settings() connection = create_session(test_settings) connection.setApiKey(test_settings.apiKey)", "% modname) try: imod = importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return", "\".\" + test_type.lower() else: raise Exception('Fail 
to load module %s' % modname) try:", "CREDENTIALS as CR from testbed import TESTBED as TB TB[\"CR\"] = CR return", "from ixnetwork_restpy import SessionAssistant from ixnetwork_restpy.testplatform.testplatform import TestPlatform targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\"))", "+ test_type.lower() else: raise Exception('Fail to load module %s' % modname) try: imod", "import IxRestUtils as IxRestUtils from ixnetwork_restpy import SessionAssistant from ixnetwork_restpy.testplatform.testplatform import TestPlatform targets_dir", "getattr(imod, test_type.title() + \"Test\") return cls(**tbinfo) except: raise Exception('Fail to load module %s'", "ixload import IxLoadTestSettings as TestSettings from ixload import IxLoadUtils as IxLoadUtils from ixload", "tb = tbinfo['stateful'][0] tg = { 'chassis_list': tb['server'], 'tgen': tb['tgen'], 'vxlan': tb['vxlan'], 'dpu':", "tg['tgen'][1][2] test_settings.portListPerCommunity = { # format: { community name : [ port list", "\"Traffic2@Network2\": [(1, slot2, port2)] } chassisList = tg['tgen'][0][0] test_settings.chassisList = [chassisList] #test_settings.chassisList =", "TestSettings from ixload import IxLoadUtils as IxLoadUtils from ixload import IxRestUtils as IxRestUtils", "slot1, port1)], \"Traffic2@Network2\": [(1, slot2, port2)] } chassisList = tg['tgen'][0][0] test_settings.chassisList = [chassisList]", "return cls(**tbinfo) except: raise Exception('Fail to load module %s' % modname) @pytest.fixture(scope=\"session\") def", "as pp import pytest import utils as util from ixload import IxLoadTestSettings as", "module %s' % modname) try: imod = importlib.import_module(modname) cls = getattr(imod, test_type.title() +", "= tg['tgen'][1][1] port2 = tg['tgen'][1][2] test_settings.portListPerCommunity = { # format: { community name", "IxLoadUtils as IxLoadUtils from ixload import IxRestUtils as IxRestUtils from ixnetwork_restpy import SessionAssistant", "import pytest import utils as 
util from ixload import IxLoadTestSettings as TestSettings from", "port1)], \"Traffic2@Network2\": [(1, slot2, port2)] } chassisList = tg['tgen'][0][0] test_settings.chassisList = [chassisList] #test_settings.chassisList", "pprint as pp import pytest import utils as util from ixload import IxLoadTestSettings", "util from ixload import IxLoadTestSettings as TestSettings from ixload import IxLoadUtils as IxLoadUtils", "try: imod = importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return cls(**tbinfo) except:", "from ixload import IxLoadUtils as IxLoadUtils from ixload import IxRestUtils as IxRestUtils from", "testbed information\"\"\" from credentials import CREDENTIALS as CR from testbed import TESTBED as", "and return testbed information\"\"\" from credentials import CREDENTIALS as CR from testbed import", "[ port list ] } \"Traffic1@Network1\": [(1, slot1, port1)], \"Traffic2@Network2\": [(1, slot2, port2)]", "= connection #ixload_settings['session_url'] = session_url ixload_settings['test_settings'] = test_settings yield ixload_settings def getTestClass(*args, **kwargs):", "test_type.title() + \"Test\") return cls(*args, **kwargs) except: raise Exception('Fail to load module %s'", "getattr(imod, test_type.title() + \"Test\") return cls(*args, **kwargs) except: raise Exception('Fail to load module", "= os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request): \"\"\"Create and return testbed", "chassisList = tg['tgen'][0][0] test_settings.chassisList = [chassisList] #test_settings.chassisList = [\"10.36.79.165\"] return test_settings def create_session(test_settings):", "from ixload import IxLoadTestSettings as TestSettings from ixload import IxLoadUtils as IxLoadUtils from", "import SessionAssistant from ixnetwork_restpy.testplatform.testplatform import TestPlatform targets_dir = 
os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0, targets_dir)", "port2 = tg['tgen'][1][2] test_settings.portListPerCommunity = { # format: { community name : [", "[\"10.36.79.165\"] return test_settings def create_session(test_settings): connection = IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion )", "as TestSettings from ixload import IxLoadUtils as IxLoadUtils from ixload import IxRestUtils as", "load module %s' % modname) @pytest.fixture(scope=\"session\") def utils(): return util @pytest.fixture def create_ixload_session_url(tbinfo):", "test_settings.chassisList = [chassisList] #test_settings.chassisList = [\"10.36.79.165\"] return test_settings def create_session(test_settings): connection = IxRestUtils.getConnection(", "pprint import pprint as pp import pytest import utils as util from ixload", "os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request): \"\"\"Create and return testbed information\"\"\"", "} chassisList = tg['tgen'][0][0] test_settings.chassisList = [chassisList] #test_settings.chassisList = [\"10.36.79.165\"] return test_settings def", "import os import sys from pprint import pprint as pp import pytest import", "import importlib import json import os import sys from pprint import pprint as", "@pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo): test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname = test_type.lower() +", "os import sys from pprint import pprint as pp import pytest import utils", "\"Test\") return cls(**tbinfo) except: raise Exception('Fail to load module %s' % modname) @pytest.fixture(scope=\"session\")", "\"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request): 
\"\"\"Create and return testbed information\"\"\" from credentials", "Functions def create_test_settings(): # TEST CONFIG test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion", "'vxlan': tb['vxlan'], 'dpu': tb } # Helper Functions def create_test_settings(): # TEST CONFIG", "= \"9.20.0.279\" slot1 = tg['tgen'][0][1] port1 = tg['tgen'][0][2] slot2 = tg['tgen'][1][1] port2 =", "= test_type.lower() + \".\" + test_type.lower() else: raise Exception('Fail to load module %s'", "tbinfo(request): \"\"\"Create and return testbed information\"\"\" from credentials import CREDENTIALS as CR from", "create_test_settings() connection = create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection #ixload_settings['session_url'] = session_url ixload_settings['test_settings'] =", "Exception('Fail to load module %s' % modname) @pytest.fixture(scope=\"session\") def utils(): return util @pytest.fixture", "test_type.lower() + \".\" + test_type.lower() else: raise Exception('Fail to load module %s' %", "imod = importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return cls(*args, **kwargs) except:", "as TB TB[\"CR\"] = CR return TB @pytest.fixture(name=\"smartnics\", scope=\"session\") def fixture_smartnics(tbinfo): test_type =", "def create_ixload_session_url(tbinfo): ixload_settings = {} tb = tbinfo['stateful'][0] tg = { 'chassis_list': tb['server'],", "imod = importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return cls(**tbinfo) except: raise", "connection = IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return connection test_settings = create_test_settings()", "test_settings = create_test_settings() connection = create_session(test_settings) 
connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection #ixload_settings['session_url'] = session_url", "from testbed import TESTBED as TB TB[\"CR\"] = CR return TB @pytest.fixture(name=\"smartnics\", scope=\"session\")", "def tbinfo(request): \"\"\"Create and return testbed information\"\"\" from credentials import CREDENTIALS as CR", "@pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings = {} tb = tbinfo['stateful'][0] tg = { 'chassis_list':", "# format: { community name : [ port list ] } \"Traffic1@Network1\": [(1,", "from pprint import pprint as pp import pytest import utils as util from", "httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return connection test_settings = create_test_settings() connection = create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection']", "= getattr(imod, test_type.title() + \"Test\") return cls(**tbinfo) except: raise Exception('Fail to load module", "tb } # Helper Functions def create_test_settings(): # TEST CONFIG test_settings = TestSettings.IxLoadTestSettings()", "cls = getattr(imod, test_type.title() + \"Test\") return cls(**tbinfo) except: raise Exception('Fail to load", "ixload_settings['connection'] = connection #ixload_settings['session_url'] = session_url ixload_settings['test_settings'] = test_settings yield ixload_settings def getTestClass(*args,", "session_url ixload_settings['test_settings'] = test_settings yield ixload_settings def getTestClass(*args, **kwargs): if test_type: modname =", "create_test_settings(): # TEST CONFIG test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\"", "= importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return cls(*args, **kwargs) except: raise", "test_settings def create_session(test_settings): connection = IxRestUtils.getConnection( 
test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return connection", "from credentials import CREDENTIALS as CR from testbed import TESTBED as TB TB[\"CR\"]", "def getTestClass(*args, **kwargs): if test_type: modname = test_type.lower() + \".\" + test_type.lower() else:", "import sys from pprint import pprint as pp import pytest import utils as", "tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname = test_type.lower() + \".\" + test_type.lower() else: raise Exception('Fail", "test_type: modname = test_type.lower() + \".\" + test_type.lower() else: raise Exception('Fail to load", "+ \"Test\") return cls(*args, **kwargs) except: raise Exception('Fail to load module %s' %", "connection = create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection #ixload_settings['session_url'] = session_url ixload_settings['test_settings'] = test_settings", "= tbinfo['stateful'][0] tg = { 'chassis_list': tb['server'], 'tgen': tb['tgen'], 'vxlan': tb['vxlan'], 'dpu': tb", "Exception('Fail to load module %s' % modname) try: imod = importlib.import_module(modname) cls =", "module %s' % modname) @pytest.fixture(scope=\"session\") def utils(): return util @pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings", "modname) try: imod = importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return cls(**tbinfo)", "return util @pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings = {} tb = tbinfo['stateful'][0] tg =", "\"v0\" test_settings.ixLoadVersion = \"9.20.0.279\" slot1 = tg['tgen'][0][1] port1 = tg['tgen'][0][2] slot2 = tg['tgen'][1][1]", "= IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion ) return connection test_settings = create_test_settings() connection", "pytest 
import utils as util from ixload import IxLoadTestSettings as TestSettings from ixload", "yield ixload_settings def getTestClass(*args, **kwargs): if test_type: modname = test_type.lower() + \".\" +", "= tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname = test_type.lower() + \".\" + test_type.lower() else: raise", "connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection #ixload_settings['session_url'] = session_url ixload_settings['test_settings'] = test_settings yield ixload_settings def", "IxLoadUtils from ixload import IxRestUtils as IxRestUtils from ixnetwork_restpy import SessionAssistant from ixnetwork_restpy.testplatform.testplatform", "= create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection #ixload_settings['session_url'] = session_url ixload_settings['test_settings'] = test_settings yield", "cls = getattr(imod, test_type.title() + \"Test\") return cls(*args, **kwargs) except: raise Exception('Fail to", "import utils as util from ixload import IxLoadTestSettings as TestSettings from ixload import", "IxRestUtils from ixnetwork_restpy import SessionAssistant from ixnetwork_restpy.testplatform.testplatform import TestPlatform targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\",", "} # Helper Functions def create_test_settings(): # TEST CONFIG test_settings = TestSettings.IxLoadTestSettings() test_settings.gatewayServer", "utils(): return util @pytest.fixture def create_ixload_session_url(tbinfo): ixload_settings = {} tb = tbinfo['stateful'][0] tg", "{ # format: { community name : [ port list ] } \"Traffic1@Network1\":", "= create_test_settings() connection = create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection #ixload_settings['session_url'] = session_url ixload_settings['test_settings']", "getTestClass(*args, **kwargs): if test_type: modname = test_type.lower() + \".\" 
+ test_type.lower() else: raise", "try: imod = importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return cls(*args, **kwargs)", "ixnetwork_restpy.testplatform.testplatform import TestPlatform targets_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"targets\")) sys.path.insert(0, targets_dir) @pytest.fixture(scope=\"session\") def tbinfo(request):", "sys from pprint import pprint as pp import pytest import utils as util", "list ] } \"Traffic1@Network1\": [(1, slot1, port1)], \"Traffic2@Network2\": [(1, slot2, port2)] } chassisList", "load module %s' % modname) try: imod = importlib.import_module(modname) cls = getattr(imod, test_type.title()", "test_type.title() + \"Test\") return cls(**tbinfo) except: raise Exception('Fail to load module %s' %", "= tg['tgen'][0][1] port1 = tg['tgen'][0][2] slot2 = tg['tgen'][1][1] port2 = tg['tgen'][1][2] test_settings.portListPerCommunity =", "= [chassisList] #test_settings.chassisList = [\"10.36.79.165\"] return test_settings def create_session(test_settings): connection = IxRestUtils.getConnection( test_settings.gatewayServer,", "= [\"10.36.79.165\"] return test_settings def create_session(test_settings): connection = IxRestUtils.getConnection( test_settings.gatewayServer, test_settings.gatewayPort, httpRedirect=test_settings.httpRedirect, version=test_settings.apiVersion", "connection test_settings = create_test_settings() connection = create_session(test_settings) connection.setApiKey(test_settings.apiKey) ixload_settings['connection'] = connection #ixload_settings['session_url'] =", "tg = { 'chassis_list': tb['server'], 'tgen': tb['tgen'], 'vxlan': tb['vxlan'], 'dpu': tb } #", "test_type.lower() else: raise Exception('Fail to load module %s' % modname) try: imod =", "test_type = tbinfo['stateless'][0]['dpu'][0]['type'] if test_type: modname = test_type.lower() + \".\" + test_type.lower() else:", "= importlib.import_module(modname) cls = getattr(imod, test_type.title() + 
\"Test\") return cls(**tbinfo) except: raise Exception('Fail", "importlib.import_module(modname) cls = getattr(imod, test_type.title() + \"Test\") return cls(**tbinfo) except: raise Exception('Fail to", "+ \".\" + test_type.lower() else: raise Exception('Fail to load module %s' % modname)", "test_settings.gatewayServer = tbinfo['stateful'][0]['server'][0]['addr'] test_settings.apiVersion = \"v0\" test_settings.ixLoadVersion = \"9.20.0.279\" slot1 = tg['tgen'][0][1] port1" ]
[ "= (nums[i] - 1) * (nums[j] - 1) hashmap.update({formula: [i, j]}) max_val =", "get the maximum value, that is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) = 3*4 = 12.", "the maximum value of (nums[i]-1)*(nums[j]-1). Example 1: Input: nums = [3,4,5,2] Output: 12", "and j=2 (indexed from 0), you will get the maximum value, that is,", "j of that array. Return the maximum value of (nums[i]-1)*(nums[j]-1). Example 1: Input:", "12 Explanation: If you choose the indices i=1 and j=2 (indexed from 0),", "(4-1)*(5-1) = 3*4 = 12. \"\"\" nums = [3, 4, 5, 2] hashmap", "1: Input: nums = [3,4,5,2] Output: 12 Explanation: If you choose the indices", "in range(i + 1, len(nums)): formula = (nums[i] - 1) * (nums[j] -", "range(i + 1, len(nums)): formula = (nums[i] - 1) * (nums[j] - 1)", "of integers nums, you will choose two different indices i and j of", "array. Return the maximum value of (nums[i]-1)*(nums[j]-1). Example 1: Input: nums = [3,4,5,2]", "j in range(i + 1, len(nums)): formula = (nums[i] - 1) * (nums[j]", "for j in range(i + 1, len(nums)): formula = (nums[i] - 1) *", "= 3*4 = 12. \"\"\" nums = [3, 4, 5, 2] hashmap =", "1, len(nums)): formula = (nums[i] - 1) * (nums[j] - 1) hashmap.update({formula: [i,", "{} for i in range(len(nums)): for j in range(i + 1, len(nums)): formula", "the indices i=1 and j=2 (indexed from 0), you will get the maximum", "= [3,4,5,2] Output: 12 Explanation: If you choose the indices i=1 and j=2", "Input: nums = [3,4,5,2] Output: 12 Explanation: If you choose the indices i=1", "i=1 and j=2 (indexed from 0), you will get the maximum value, that", "is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) = 3*4 = 12. \"\"\" nums = [3, 4,", "range(len(nums)): for j in range(i + 1, len(nums)): formula = (nums[i] - 1)", "= 12. \"\"\" nums = [3, 4, 5, 2] hashmap = {} for", "you choose the indices i=1 and j=2 (indexed from 0), you will get", "= (4-1)*(5-1) = 3*4 = 12. 
\"\"\" nums = [3, 4, 5, 2]", "<filename>Array/MaxProductOfTwoElements.py \"\"\"Given the array of integers nums, you will choose two different indices", "hashmap = {} for i in range(len(nums)): for j in range(i + 1,", "two different indices i and j of that array. Return the maximum value", "the array of integers nums, you will choose two different indices i and", "nums, you will choose two different indices i and j of that array.", "choose two different indices i and j of that array. Return the maximum", "12. \"\"\" nums = [3, 4, 5, 2] hashmap = {} for i", "indices i=1 and j=2 (indexed from 0), you will get the maximum value,", "i and j of that array. Return the maximum value of (nums[i]-1)*(nums[j]-1). Example", "formula = (nums[i] - 1) * (nums[j] - 1) hashmap.update({formula: [i, j]}) max_val", "of (nums[i]-1)*(nums[j]-1). Example 1: Input: nums = [3,4,5,2] Output: 12 Explanation: If you", "2] hashmap = {} for i in range(len(nums)): for j in range(i +", "Return the maximum value of (nums[i]-1)*(nums[j]-1). Example 1: Input: nums = [3,4,5,2] Output:", "(nums[i] - 1) * (nums[j] - 1) hashmap.update({formula: [i, j]}) max_val = max(list(hashmap.keys()))", "- 1) * (nums[j] - 1) hashmap.update({formula: [i, j]}) max_val = max(list(hashmap.keys())) print(max_val)", "Example 1: Input: nums = [3,4,5,2] Output: 12 Explanation: If you choose the", "0), you will get the maximum value, that is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) =", "that array. Return the maximum value of (nums[i]-1)*(nums[j]-1). Example 1: Input: nums =", "If you choose the indices i=1 and j=2 (indexed from 0), you will", "Explanation: If you choose the indices i=1 and j=2 (indexed from 0), you", "3*4 = 12. \"\"\" nums = [3, 4, 5, 2] hashmap = {}", "integers nums, you will choose two different indices i and j of that", "different indices i and j of that array. 
Return the maximum value of", "in range(len(nums)): for j in range(i + 1, len(nums)): formula = (nums[i] -", "will choose two different indices i and j of that array. Return the", "\"\"\" nums = [3, 4, 5, 2] hashmap = {} for i in", "len(nums)): formula = (nums[i] - 1) * (nums[j] - 1) hashmap.update({formula: [i, j]})", "value, that is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) = 3*4 = 12. \"\"\" nums =", "\"\"\"Given the array of integers nums, you will choose two different indices i", "of that array. Return the maximum value of (nums[i]-1)*(nums[j]-1). Example 1: Input: nums", "(nums[i]-1)*(nums[j]-1). Example 1: Input: nums = [3,4,5,2] Output: 12 Explanation: If you choose", "(indexed from 0), you will get the maximum value, that is, (nums[1]-1)*(nums[2]-1) =", "array of integers nums, you will choose two different indices i and j", "you will get the maximum value, that is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) = 3*4", "that is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) = 3*4 = 12. \"\"\" nums = [3,", "nums = [3,4,5,2] Output: 12 Explanation: If you choose the indices i=1 and", "(nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) = 3*4 = 12. \"\"\" nums = [3, 4, 5,", "from 0), you will get the maximum value, that is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1)", "+ 1, len(nums)): formula = (nums[i] - 1) * (nums[j] - 1) hashmap.update({formula:", "will get the maximum value, that is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) = 3*4 =", "maximum value of (nums[i]-1)*(nums[j]-1). Example 1: Input: nums = [3,4,5,2] Output: 12 Explanation:", "for i in range(len(nums)): for j in range(i + 1, len(nums)): formula =", "= [3, 4, 5, 2] hashmap = {} for i in range(len(nums)): for", "[3, 4, 5, 2] hashmap = {} for i in range(len(nums)): for j", "[3,4,5,2] Output: 12 Explanation: If you choose the indices i=1 and j=2 (indexed", "value of (nums[i]-1)*(nums[j]-1). 
Example 1: Input: nums = [3,4,5,2] Output: 12 Explanation: If", "choose the indices i=1 and j=2 (indexed from 0), you will get the", "= {} for i in range(len(nums)): for j in range(i + 1, len(nums)):", "nums = [3, 4, 5, 2] hashmap = {} for i in range(len(nums)):", "j=2 (indexed from 0), you will get the maximum value, that is, (nums[1]-1)*(nums[2]-1)", "and j of that array. Return the maximum value of (nums[i]-1)*(nums[j]-1). Example 1:", "5, 2] hashmap = {} for i in range(len(nums)): for j in range(i", "indices i and j of that array. Return the maximum value of (nums[i]-1)*(nums[j]-1).", "maximum value, that is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) = 3*4 = 12. \"\"\" nums", "i in range(len(nums)): for j in range(i + 1, len(nums)): formula = (nums[i]", "Output: 12 Explanation: If you choose the indices i=1 and j=2 (indexed from", "the maximum value, that is, (nums[1]-1)*(nums[2]-1) = (4-1)*(5-1) = 3*4 = 12. \"\"\"", "you will choose two different indices i and j of that array. Return", "4, 5, 2] hashmap = {} for i in range(len(nums)): for j in" ]
[ "\"packages.json\", attributes) return response def test_get_attributes(create_attributes): bintray = Bintray() response = bintray.get_attributes(\"uilianries\", \"generic\",", "200} in response def test_get_files_attributes(create_file_attributes): assert [{'name': 'att1', 'type': 'STRING', 'values': ['val1']}, {'error':", "attributes = [\"att1\"] response = bintray.delete_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert {'error': False,", "response = bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert [{'name': 'att1', 'type': 'string', 'values':", "' 'packages.json', 'statusCode': 200} == response def test_search_file_attributes(create_file_attributes): bintray = Bintray() attributes =", "[\"val3\"], \"type\": \"string\"}] response = bintray.update_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type':", "@pytest.fixture() def create_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\":", "'att1', 'type': 'STRING', 'values': ['val3']}, {'error': False, 'statusCode': 200}] == response def test_delete_file_attributes(create_file_attributes):", "= Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val3\"], \"type\": \"string\"}] response = bintray.update_file_attributes(\"uilianries\",", "\"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val2']}, {'error': False, 'statusCode':", "= bintray.delete_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert {'error': False, 'message': 'success', 'statusCode': 200}", "test_set_attributes(create_attributes): assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] ==", "response def test_search_attributes(create_attributes): bintray = Bintray() attributes = [{'att1': [\"val1\", \"val2\"]}] response =", 
"'statusCode': 200} in response def test_get_files_attributes(create_file_attributes): assert [{'name': 'att1', 'type': 'STRING', 'values': ['val1']},", "\"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val3']}, {'error': False, 'statusCode': 200}]", "'Attributes were deleted successfully from the following file path: ' 'packages.json', 'statusCode': 200}", "attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\",", "def create_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}]", "200}] == response response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", [\"att1\"]) assert [{'name': 'att1',", "= bintray.update_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val3']}, {'error':", "== response def test_delete_file_attributes(create_file_attributes): bintray = Bintray() attributes = [\"att1\"] response = bintray.delete_file_attributes(\"uilianries\",", "\"type\": \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) return response def test_get_attributes(create_attributes): bintray", "\"statistics\", \"test\") assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}]", "'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == response def test_set_attributes(create_attributes): assert", "= bintray.search_attributes(\"uilianries\", \"generic\", \"statistics\", attributes) assert {'error': False, 'statusCode': 200} in response def", "attributes = [{\"name\": \"att1\", \"values\": [\"val3\"], \"type\": \"string\"}] response = bintray.update_file_attributes(\"uilianries\", \"generic\", \"packages.json\",", "assert [{'name': 
'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_attributes", "\"att1\", \"values\": [\"val2\"], \"type\": \"string\"}] response = bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert", "= [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes)", "Bintray() attributes = [\"att1\"] response = bintray.delete_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert {'error':", "'type': 'STRING', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_file_attributes def test_set_files_attributes(): bintray", "attributes) assert [{'name': 'att1', 'type': 'string', 'values': ['val2']}, {'error': False, 'statusCode': 200}] ==", "'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == response response = bintray.get_attributes(\"uilianries\",", "\"generic\", \"statistics\", \"test\", attributes) assert {'error': False, 'message': 'success', 'statusCode': 200} == response", "Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\",", "Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val2\"], \"type\": \"string\"}] response = bintray.update_attributes(\"uilianries\", \"generic\",", "Bintray @pytest.fixture() def create_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"],", "test_delete_file_attributes(create_file_attributes): bintray = Bintray() attributes = [\"att1\"] response = bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes)", "create_file_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] response", "'message': 'success', 'statusCode': 200} == response 
def test_search_attributes(create_attributes): bintray = Bintray() attributes =", "def test_delete_attributes(create_attributes): bintray = Bintray() attributes = [\"att1\"] response = bintray.delete_attributes(\"uilianries\", \"generic\", \"statistics\",", "bintray.search_attributes(\"uilianries\", \"generic\", \"statistics\", attributes) assert {'error': False, 'statusCode': 200} in response def test_get_files_attributes(create_file_attributes):", "[\"val1\", \"val2\"]}] response = bintray.search_attributes(\"uilianries\", \"generic\", \"statistics\", attributes) assert {'error': False, 'statusCode': 200}", "Bintray() attributes = [{'att1': [\"val1\", \"val2\"]}] response = bintray.search_attributes(\"uilianries\", \"generic\", \"statistics\", attributes) assert", "200} == response def test_search_attributes(create_attributes): bintray = Bintray() attributes = [{'att1': [\"val1\", \"val2\"]}]", "'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == response response = bintray.get_attributes(\"uilianries\", \"generic\",", "{'error': False, 'statusCode': 200}] == create_file_attributes def test_set_files_attributes(): bintray = Bintray() attributes =", "def test_set_attributes(create_attributes): assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}]", "== create_file_attributes def test_set_files_attributes(): bintray = Bintray() attributes = [{'name': 'att1', 'values': ['val2'],", "\"test\", attributes) @pytest.fixture() def create_file_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\":", "test_get_files_attributes(create_file_attributes): assert [{'name': 'att1', 'type': 'STRING', 'values': ['val1']}, {'error': False, 'statusCode': 200}] ==", "attributes) return response def test_get_attributes(create_attributes): bintray = Bintray() response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\",", "['val2']}, {'error': False, 'statusCode': 200}] == 
response def test_update_files_attributes(): bintray = Bintray() attributes", "bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val3\"], \"type\": \"string\"}] response =", "test_delete_attributes(create_attributes): bintray = Bintray() attributes = [\"att1\"] response = bintray.delete_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\",", "response = bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert {'error': False, 'message': 'Attributes were deleted", "response response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", [\"att1\"]) assert [{'name': 'att1', 'type': 'string',", "[{'name': 'att1', 'type': 'STRING', 'values': ['val3']}, {'error': False, 'statusCode': 200}] == response def", "[\"val1\"], \"type\": \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) return response def test_get_attributes(create_attributes):", "successfully from the following file path: ' 'packages.json', 'statusCode': 200} == response def", "'packages.json', 'statusCode': 200} == response def test_search_file_attributes(create_file_attributes): bintray = Bintray() attributes = [{'att1':", "response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) return response def test_get_attributes(create_attributes): bintray = Bintray()", "bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) return response def test_get_attributes(create_attributes): bintray = Bintray() response =", "'type': \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING',", "assert {'error': False, 'message': 'Attributes were deleted successfully from the following file path:", "assert [{'name': 'att1', 'type': 'STRING', 'values': ['val3']}, {'error': False, 'statusCode': 200}] == 
response", "response def test_set_attributes(create_attributes): assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode':", "= bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\") assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error':", "bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\") assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False,", "False, 'statusCode': 200}] == response def test_delete_attributes(create_attributes): bintray = Bintray() attributes = [\"att1\"]", "'message': 'Attributes were deleted successfully from the following file path: ' 'packages.json', 'statusCode':", "\"att1\", \"values\": [\"val3\"], \"type\": \"string\"}] response = bintray.update_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name':", "\"test\", [\"att1\"]) assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}]", "\"test\", attributes) assert {'error': False, 'message': 'success', 'statusCode': 200} == response def test_search_attributes(create_attributes):", "[{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == response def", "'values': ['val1']}, {'error': False, 'statusCode': 200}] == response def test_set_attributes(create_attributes): assert [{'name': 'att1',", "assert {'error': False, 'message': 'success', 'statusCode': 200} == response def test_search_attributes(create_attributes): bintray =", "'statusCode': 200}] == create_file_attributes def test_set_files_attributes(): bintray = Bintray() attributes = [{'name': 'att1',", "from the following file path: ' 'packages.json', 'statusCode': 200} == response def test_search_file_attributes(create_file_attributes):", "== response response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", [\"att1\"]) assert [{'name': 'att1', 'type':", "= 
Bintray() attributes = [{'name': 'att1', 'values': ['val2'], 'type': \"string\"}] response = bintray.set_file_attributes(\"uilianries\",", "[\"val1\"], \"type\": \"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) @pytest.fixture() def create_file_attributes(): bintray", "bintray = Bintray() response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\") assert [{'name': 'att1', 'type':", "= Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] response = bintray.set_file_attributes(\"uilianries\",", "200}] == response def test_set_attributes(create_attributes): assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error':", "test_search_attributes(create_attributes): bintray = Bintray() attributes = [{'att1': [\"val1\", \"val2\"]}] response = bintray.search_attributes(\"uilianries\", \"generic\",", "[{'name': 'att1', 'type': 'STRING', 'values': ['val2']}, {'error': False, 'statusCode': 200}] == response def", "'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_attributes def test_update_attributes(create_attributes): bintray = Bintray()", "200}] == response def test_update_files_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\":", "bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val2']}, {'error': False,", "'statusCode': 200} == response def test_search_file_attributes(create_file_attributes): bintray = Bintray() attributes = [{'att1': [\"val1\"]}]", "= bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert [{'name': 'att1', 'type': 'string', 'values': ['val2']},", "['val1']}, {'error': False, 'statusCode': 200}] == response response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\",", "== response def 
test_delete_attributes(create_attributes): bintray = Bintray() attributes = [\"att1\"] response = bintray.delete_attributes(\"uilianries\",", "[\"val2\"], \"type\": \"string\"}] response = bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert [{'name': 'att1',", "bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] response =", "[{\"name\": \"att1\", \"values\": [\"val3\"], \"type\": \"string\"}] response = bintray.update_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert", "[{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_attributes def", "'values': ['val1']}, {'error': False, 'statusCode': 200}] == response response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\",", "response def test_delete_attributes(create_attributes): bintray = Bintray() attributes = [\"att1\"] response = bintray.delete_attributes(\"uilianries\", \"generic\",", "'STRING', 'values': ['val2']}, {'error': False, 'statusCode': 200}] == response def test_update_files_attributes(): bintray =", "\"generic\", \"packages.json\", attributes) return response def test_get_attributes(create_attributes): bintray = Bintray() response = bintray.get_attributes(\"uilianries\",", "200}] == create_file_attributes def test_set_files_attributes(): bintray = Bintray() attributes = [{'name': 'att1', 'values':", "['val3']}, {'error': False, 'statusCode': 200}] == response def test_delete_file_attributes(create_file_attributes): bintray = Bintray() attributes", "bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert {'error': False, 'message': 'Attributes were deleted successfully from", "False, 'statusCode': 200}] == response def test_update_files_attributes(): bintray = Bintray() attributes = [{\"name\":", "def test_search_file_attributes(create_file_attributes): bintray = 
Bintray() attributes = [{'att1': [\"val1\"]}] response = bintray.search_file_attributes(\"uilianries\", \"generic\",", "'statusCode': 200}] == response response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", [\"att1\"]) assert [{'name':", "\"val2\"]}] response = bintray.search_attributes(\"uilianries\", \"generic\", \"statistics\", attributes) assert {'error': False, 'statusCode': 200} in", "response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val2']},", "{'error': False, 'statusCode': 200}] == response def test_set_attributes(create_attributes): assert [{'name': 'att1', 'type': 'string',", "'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == response def test_set_attributes(create_attributes):", "def create_file_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}]", "\"statistics\", \"test\", attributes) @pytest.fixture() def create_file_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\",", "'type': 'string', 'values': ['val2']}, {'error': False, 'statusCode': 200}] == response def test_delete_attributes(create_attributes): bintray", "assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == response", "bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val2\"], \"type\": \"string\"}] response =", "attributes = [{'att1': [\"val1\", \"val2\"]}] response = bintray.search_attributes(\"uilianries\", \"generic\", \"statistics\", attributes) assert {'error':", "import pytest from bintray.bintray import Bintray @pytest.fixture() def create_attributes(): bintray = Bintray() attributes", "= [\"att1\"] response = bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert {'error': False, 'message': 'Attributes", "def 
test_delete_file_attributes(create_file_attributes): bintray = Bintray() attributes = [\"att1\"] response = bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\",", "[{\"name\": \"att1\", \"values\": [\"val2\"], \"type\": \"string\"}] response = bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes)", "\"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) return response def test_get_attributes(create_attributes): bintray =", "bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] return bintray.set_attributes(\"uilianries\",", "= bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) return response def test_get_attributes(create_attributes): bintray = Bintray() response", "test_get_attributes(create_attributes): bintray = Bintray() response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\") assert [{'name': 'att1',", "Bintray() response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\") assert [{'name': 'att1', 'type': 'string', 'values':", "= Bintray() attributes = [{'att1': [\"val1\", \"val2\"]}] response = bintray.search_attributes(\"uilianries\", \"generic\", \"statistics\", attributes)", "\"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) @pytest.fixture() def create_file_attributes(): bintray = Bintray()", "\"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) @pytest.fixture() def", "'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_attributes def test_update_attributes(create_attributes): bintray", "in response def test_get_files_attributes(create_file_attributes): assert [{'name': 'att1', 'type': 'STRING', 
'values': ['val1']}, {'error': False,", "def test_update_files_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val3\"], \"type\": \"string\"}]", "== create_attributes def test_update_attributes(create_attributes): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val2\"],", "response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", [\"att1\"]) assert [{'name': 'att1', 'type': 'string', 'values':", "response def test_get_attributes(create_attributes): bintray = Bintray() response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\") assert", "[\"att1\"] response = bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert {'error': False, 'message': 'Attributes were", "== response def test_search_attributes(create_attributes): bintray = Bintray() attributes = [{'att1': [\"val1\", \"val2\"]}] response", "'statusCode': 200}] == create_attributes def test_update_attributes(create_attributes): bintray = Bintray() attributes = [{\"name\": \"att1\",", "attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\",", "<filename>tests/test_attributes.py import pytest from bintray.bintray import Bintray @pytest.fixture() def create_attributes(): bintray = Bintray()", "\"type\": \"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) @pytest.fixture() def create_file_attributes(): bintray =", "[{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) return", "[{'name': 'att1', 'type': 'STRING', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_file_attributes def", "= [\"att1\"] response = bintray.delete_attributes(\"uilianries\", \"generic\", 
\"statistics\", \"test\", attributes) assert {'error': False, 'message':", "False, 'message': 'Attributes were deleted successfully from the following file path: ' 'packages.json',", "\"statistics\", attributes) assert {'error': False, 'statusCode': 200} in response def test_get_files_attributes(create_file_attributes): assert [{'name':", "bintray.delete_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert {'error': False, 'message': 'success', 'statusCode': 200} ==", "\"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val2']}, {'error': False, 'statusCode': 200}]", "\"values\": [\"val1\"], \"type\": \"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) @pytest.fixture() def create_file_attributes():", "False, 'statusCode': 200}] == response def test_set_attributes(create_attributes): assert [{'name': 'att1', 'type': 'string', 'values':", "'statusCode': 200}] == response def test_update_files_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\",", "def test_search_attributes(create_attributes): bintray = Bintray() attributes = [{'att1': [\"val1\", \"val2\"]}] response = bintray.search_attributes(\"uilianries\",", "attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val3']}, {'error': False, 'statusCode': 200}] ==", "[{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) @pytest.fixture()", "test_update_attributes(create_attributes): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val2\"], \"type\": \"string\"}] response", "'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == response response =", "{'error': False, 'statusCode': 200}] == create_attributes def test_update_attributes(create_attributes): bintray = Bintray() attributes =", "False, 
'statusCode': 200}] == create_file_attributes def test_set_files_attributes(): bintray = Bintray() attributes = [{'name':", "= bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert {'error': False, 'message': 'Attributes were deleted successfully", "'values': ['val3']}, {'error': False, 'statusCode': 200}] == response def test_delete_file_attributes(create_file_attributes): bintray = Bintray()", "attributes) @pytest.fixture() def create_file_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"],", "\"string\"}] response = bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert [{'name': 'att1', 'type': 'string',", "attributes = [{'name': 'att1', 'values': ['val2'], 'type': \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\",", "\"generic\", \"statistics\", \"test\") assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode':", "= [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes)", "= bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", [\"att1\"]) assert [{'name': 'att1', 'type': 'string', 'values': ['val1']},", "def test_set_files_attributes(): bintray = Bintray() attributes = [{'name': 'att1', 'values': ['val2'], 'type': \"string\"}]", "== response def test_set_attributes(create_attributes): assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False,", "test_update_files_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val3\"], \"type\": \"string\"}] response", "Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\",", "\"generic\", \"statistics\", 
\"test\", [\"att1\"]) assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False,", "\"test\") assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] ==", "{'error': False, 'statusCode': 200} in response def test_get_files_attributes(create_file_attributes): assert [{'name': 'att1', 'type': 'STRING',", "assert [{'name': 'att1', 'type': 'string', 'values': ['val2']}, {'error': False, 'statusCode': 200}] == response", "= bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val2']}, {'error':", "response = bintray.update_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val3']},", "[{'name': 'att1', 'type': 'string', 'values': ['val2']}, {'error': False, 'statusCode': 200}] == response def", "attributes = [{\"name\": \"att1\", \"values\": [\"val2\"], \"type\": \"string\"}] response = bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\",", "= [{\"name\": \"att1\", \"values\": [\"val2\"], \"type\": \"string\"}] response = bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\",", "bintray.update_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val3']}, {'error': False,", "assert [{'name': 'att1', 'type': 'STRING', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_file_attributes", "Bintray() attributes = [\"att1\"] response = bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert {'error': False,", "attributes) assert {'error': False, 'message': 'Attributes were deleted successfully from the following file", "'STRING', 'values': ['val3']}, {'error': False, 'statusCode': 200}] == response def test_delete_file_attributes(create_file_attributes): bintray =", 
"'statusCode': 200}] == response def test_set_attributes(create_attributes): assert [{'name': 'att1', 'type': 'string', 'values': ['val1']},", "from bintray.bintray import Bintray @pytest.fixture() def create_attributes(): bintray = Bintray() attributes = [{\"name\":", "['val2'], 'type': \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type':", "\"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val3']}, {'error': False, 'statusCode':", "\"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values':", "\"statistics\", \"test\", attributes) assert {'error': False, 'message': 'success', 'statusCode': 200} == response def", "\"generic\", \"statistics\", \"test\", attributes) assert [{'name': 'att1', 'type': 'string', 'values': ['val2']}, {'error': False,", "[{'att1': [\"val1\", \"val2\"]}] response = bintray.search_attributes(\"uilianries\", \"generic\", \"statistics\", attributes) assert {'error': False, 'statusCode':", "bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", [\"att1\"]) assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error':", "False, 'statusCode': 200}] == response response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", [\"att1\"]) assert", "def test_update_attributes(create_attributes): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val2\"], \"type\": \"string\"}]", "False, 'statusCode': 200}] == response def test_delete_file_attributes(create_file_attributes): bintray = Bintray() attributes = [\"att1\"]", "assert [{'name': 'att1', 'type': 'STRING', 'values': ['val2']}, {'error': False, 'statusCode': 200}] == response", "@pytest.fixture() def create_file_attributes(): bintray = Bintray() attributes = [{\"name\": 
\"att1\", \"values\": [\"val1\"], \"type\":", "'string', 'values': ['val2']}, {'error': False, 'statusCode': 200}] == response def test_delete_attributes(create_attributes): bintray =", "= [{\"name\": \"att1\", \"values\": [\"val3\"], \"type\": \"string\"}] response = bintray.update_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes)", "'values': ['val2'], 'type': \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1',", "'statusCode': 200}] == response def test_delete_file_attributes(create_file_attributes): bintray = Bintray() attributes = [\"att1\"] response", "attributes) assert {'error': False, 'message': 'success', 'statusCode': 200} == response def test_search_attributes(create_attributes): bintray", "bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert [{'name': 'att1', 'type': 'string', 'values': ['val2']}, {'error':", "= Bintray() attributes = [\"att1\"] response = bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert {'error':", "False, 'statusCode': 200}] == create_attributes def test_update_attributes(create_attributes): bintray = Bintray() attributes = [{\"name\":", "response = bintray.delete_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert {'error': False, 'message': 'success', 'statusCode':", "'att1', 'values': ['val2'], 'type': \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name':", "{'error': False, 'statusCode': 200}] == response def test_delete_attributes(create_attributes): bintray = Bintray() attributes =", "['val2']}, {'error': False, 'statusCode': 200}] == response def test_delete_attributes(create_attributes): bintray = Bintray() attributes", "attributes = [{'att1': [\"val1\"]}] response = bintray.search_file_attributes(\"uilianries\", 
\"generic\", attributes) assert \"packages.json\" == response[0][\"name\"]", "\"type\": \"string\"}] response = bintray.update_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) assert [{'name': 'att1', 'type':", "\"statistics\", \"test\", [\"att1\"]) assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode':", "'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_file_attributes def test_set_files_attributes(): bintray = Bintray()", "test_set_files_attributes(): bintray = Bintray() attributes = [{'name': 'att1', 'values': ['val2'], 'type': \"string\"}] response", "were deleted successfully from the following file path: ' 'packages.json', 'statusCode': 200} ==", "[\"att1\"]) assert [{'name': 'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] ==", "['val1']}, {'error': False, 'statusCode': 200}] == create_attributes def test_update_attributes(create_attributes): bintray = Bintray() attributes", "def test_get_files_attributes(create_file_attributes): assert [{'name': 'att1', 'type': 'STRING', 'values': ['val1']}, {'error': False, 'statusCode': 200}]", "response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\") assert [{'name': 'att1', 'type': 'string', 'values': ['val1']},", "'att1', 'type': 'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_attributes def test_update_attributes(create_attributes):", "\"generic\", \"statistics\", attributes) assert {'error': False, 'statusCode': 200} in response def test_get_files_attributes(create_file_attributes): assert", "200}] == response def test_delete_file_attributes(create_file_attributes): bintray = Bintray() attributes = [\"att1\"] response =", "the following file path: ' 'packages.json', 'statusCode': 200} == response def test_search_file_attributes(create_file_attributes): bintray", "create_file_attributes def test_set_files_attributes(): bintray = 
Bintray() attributes = [{'name': 'att1', 'values': ['val2'], 'type':", "= [{'name': 'att1', 'values': ['val2'], 'type': \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes)", "\"string\"}] response = bintray.update_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert [{'name': 'att1', 'type': 'STRING', 'values':", "\"values\": [\"val1\"], \"type\": \"string\"}] response = bintray.set_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) return response def", "deleted successfully from the following file path: ' 'packages.json', 'statusCode': 200} == response", "bintray = Bintray() attributes = [\"att1\"] response = bintray.delete_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes)", "'type': 'STRING', 'values': ['val2']}, {'error': False, 'statusCode': 200}] == response def test_update_files_attributes(): bintray", "Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val3\"], \"type\": \"string\"}] response = bintray.update_file_attributes(\"uilianries\", \"generic\",", "False, 'message': 'success', 'statusCode': 200} == response def test_search_attributes(create_attributes): bintray = Bintray() attributes", "attributes = [\"att1\"] response = bintray.delete_file_attributes(\"uilianries\", \"generic\", \"packages.json\", attributes) assert {'error': False, 'message':", "= Bintray() attributes = [{'att1': [\"val1\"]}] response = bintray.search_file_attributes(\"uilianries\", \"generic\", attributes) assert \"packages.json\"", "200}] == create_attributes def test_update_attributes(create_attributes): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\":", "bintray = Bintray() attributes = [{'att1': [\"val1\", \"val2\"]}] response = bintray.search_attributes(\"uilianries\", \"generic\", \"statistics\",", "\"generic\", \"statistics\", \"test\", attributes) @pytest.fixture() def create_file_attributes(): bintray = 
Bintray() attributes = [{\"name\":", "'att1', 'type': 'STRING', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_file_attributes def test_set_files_attributes():", "{'error': False, 'statusCode': 200}] == response response = bintray.get_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", [\"att1\"])", "attributes) assert [{'name': 'att1', 'type': 'STRING', 'values': ['val2']}, {'error': False, 'statusCode': 200}] ==", "= Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val1\"], \"type\": \"string\"}] return bintray.set_attributes(\"uilianries\", \"generic\",", "'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == response def test_set_attributes(create_attributes): assert [{'name':", "'string', 'values': ['val1']}, {'error': False, 'statusCode': 200}] == create_attributes def test_update_attributes(create_attributes): bintray =", "bintray.set_attributes(\"uilianries\", \"generic\", \"statistics\", \"test\", attributes) @pytest.fixture() def create_file_attributes(): bintray = Bintray() attributes =", "\"statistics\", \"test\", attributes) assert [{'name': 'att1', 'type': 'string', 'values': ['val2']}, {'error': False, 'statusCode':", "path: ' 'packages.json', 'statusCode': 200} == response def test_search_file_attributes(create_file_attributes): bintray = Bintray() attributes", "pytest from bintray.bintray import Bintray @pytest.fixture() def create_attributes(): bintray = Bintray() attributes =", "== response def test_search_file_attributes(create_file_attributes): bintray = Bintray() attributes = [{'att1': [\"val1\"]}] response =", "['val1']}, {'error': False, 'statusCode': 200}] == response def test_set_attributes(create_attributes): assert [{'name': 'att1', 'type':", "response def test_update_files_attributes(): bintray = Bintray() attributes = [{\"name\": \"att1\", \"values\": [\"val3\"], \"type\":", "return response def test_get_attributes(create_attributes): bintray = Bintray() response = 
def test_get_attributes(create_attributes):
    """Fetching package attributes, with and without a name filter, returns att1."""
    client = Bintray()
    expected = [{'name': 'att1', 'type': 'string', 'values': ['val1']},
                {'error': False, 'statusCode': 200}]
    # Unfiltered fetch.
    assert expected == client.get_attributes("uilianries", "generic", "statistics", "test")
    # Fetch restricted to the single attribute name.
    assert expected == client.get_attributes("uilianries", "generic", "statistics", "test",
                                             ["att1"])


def test_set_attributes(create_attributes):
    """The fixture's set_attributes response echoes the created attribute."""
    expected = [{'name': 'att1', 'type': 'string', 'values': ['val1']},
                {'error': False, 'statusCode': 200}]
    assert expected == create_attributes


def test_update_attributes(create_attributes):
    """Updating att1 to val2 succeeds and echoes the new value."""
    client = Bintray()
    payload = [{"name": "att1", "values": ["val2"], "type": "string"}]
    response = client.update_attributes("uilianries", "generic", "statistics", "test", payload)
    assert [{'name': 'att1', 'type': 'string', 'values': ['val2']},
            {'error': False, 'statusCode': 200}] == response


def test_delete_attributes(create_attributes):
    """Deleting att1 from the package reports plain success."""
    client = Bintray()
    response = client.delete_attributes("uilianries", "generic", "statistics", "test", ["att1"])
    assert {'error': False, 'message': 'success', 'statusCode': 200} == response


def test_search_attributes(create_attributes):
    """Searching by attribute values returns an OK status entry in the result list."""
    client = Bintray()
    criteria = [{'att1': ["val1", "val2"]}]
    response = client.search_attributes("uilianries", "generic", "statistics", criteria)
    assert {'error': False, 'statusCode': 200} in response
def test_get_files_attributes(create_file_attributes):
    """File attributes come back with an upper-case STRING type."""
    assert [{'name': 'att1', 'type': 'STRING', 'values': ['val1']},
            {'error': False, 'statusCode': 200}] == create_file_attributes


def test_set_files_attributes():
    """Setting a file attribute echoes the stored value."""
    client = Bintray()
    payload = [{'name': 'att1', 'values': ['val2'], 'type': "string"}]
    response = client.set_file_attributes("uilianries", "generic", "packages.json", payload)
    assert [{'name': 'att1', 'type': 'STRING', 'values': ['val2']},
            {'error': False, 'statusCode': 200}] == response


def test_update_files_attributes():
    """Updating a file attribute echoes the new value."""
    client = Bintray()
    payload = [{"name": "att1", "values": ["val3"], "type": "string"}]
    response = client.update_file_attributes("uilianries", "generic", "packages.json", payload)
    assert [{'name': 'att1', 'type': 'STRING', 'values': ['val3']},
            {'error': False, 'statusCode': 200}] == response


def test_delete_file_attributes(create_file_attributes):
    """Deleting a file attribute reports the file path in the success message."""
    client = Bintray()
    response = client.delete_file_attributes("uilianries", "generic", "packages.json", ["att1"])
    assert {'error': False,
            'message': 'Attributes were deleted successfully from the following file path: '
                       'packages.json',
            'statusCode': 200} == response
def plot_water_levels(station, dates, levels):
    """Task 2E: Plot the water level at a station against time.

    Draws the measured levels alongside flat reference lines for the
    station's typical low and high range, then shows the figure.
    """
    # One typical-range value per plotted date so the reference lines
    # span the whole x-axis.
    low, high = station.typical_range
    typical_high = [high] * len(dates)
    typical_low = [low] * len(dates)

    # Draw the measured series and the two reference lines.
    plt.plot(dates, levels, label="Water Level")
    plt.plot(dates, typical_high, label="Typical High")
    plt.plot(dates, typical_low, label="Typical Low")

    # Axis labels, legend, rotated date ticks and title.
    plt.xlabel('Date')
    plt.ylabel('Water Level (m)')
    plt.legend()
    plt.xticks(rotation=45)
    plt.title(station.name)

    # tight_layout stops the rotated date labels being clipped.
    plt.tight_layout()
    return plt.show()
def plot_water_level_with_fit(station, dates, levels, p):
    """Task 2F: Plot water level data together with a degree-p best-fit polynomial.

    Args:
        station: station object providing .typical_range and .name
                 (assumed same contract as in plot_water_levels — confirm)
        dates:   list of datetime objects for each measurement
        levels:  water levels corresponding to dates
        p:       degree of the polynomial to fit
    """
    # Convert dates to matplotlib float day numbers for fitting.
    dates_float = matplotlib.dates.date2num(dates)

    # Shift so the first date maps to zero; keeps the polynomial fit
    # well conditioned.  (Vectorized — replaces the manual index loop.)
    dates_shifted = np.asarray(dates_float) - dates_float[0]

    # Least-squares coefficients of the best-fit polynomial f(x) of degree p.
    p_coeff = np.polyfit(dates_shifted, levels, p)

    # Wrap the coefficients in a callable polynomial, e.g. poly(0.3).
    poly = np.poly1d(p_coeff)

    # Plot the original data points.
    plt.plot(dates_shifted, levels, '.', label='Data Points')

    # Evaluate the fit and the typical range low/high at 30 points along
    # the interval (note the polynomial is evaluated in the shifted
    # time coordinate; x[0] is 0 by construction).
    x = np.linspace(dates_shifted[0], dates_shifted[-1], 30)
    range_high = [station.typical_range[1]] * len(x)
    range_low = [station.typical_range[0]] * len(x)
    plt.plot(x, poly(x - x[0]), label="Polynomial Fit")
    plt.plot(x, range_high, label="Typical High")
    plt.plot(x, range_low, label="Typical Low")

    # Axis labels, legend, rotated date ticks and title.
    plt.xlabel('Dates from {}'.format(dates[-1]))
    plt.ylabel('Water Level (m)')
    plt.legend()
    plt.xticks(rotation=45)
    plt.title(station.name)

    # tight_layout stops the rotated date labels being clipped.
    plt.tight_layout()
    return plt.show()
[ "m] = 0 m += 1 if m >= steps: break return X,", "0: X[n, m] = vector_id else: X[n, m] = 0 m += 1", "= self._get_features(docs, labels, steps) return X, y def _get_features(self, docs, labels, steps): X", "spacy import numpy as np class SpPipe(): def __init__(self): self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat'])", "X, y = self._get_features(docs, labels, steps) return X, y def _get_features(self, docs, labels,", "else: X[n, m] = 0 m += 1 if m >= steps: break", "def __init__(self): self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self, texts, labels, steps=10): print 'tokenizing", "import spacy import numpy as np class SpPipe(): def __init__(self): self.nlp = spacy.load('en_core_web_md',", "m = 0 for token in doc: vector_id = token.vocab.vectors.find(key=token.orth) if vector_id >=", "+= 1 if m >= steps: break return X, labels def get_embedding(self): return", "[self.nlp(unicode(text)) for text in texts] X, y = self._get_features(docs, labels, steps) return X,", "docs = [self.nlp(unicode(text)) for text in texts] X, y = self._get_features(docs, labels, steps)", "# Author: <NAME> from __future__ import unicode_literals import spacy import numpy as np", "disable=['ner','parser','tagger','textcat']) def __call__(self, texts, labels, steps=10): print 'tokenizing with spacy...' 
docs = [self.nlp(unicode(text))", "in enumerate(docs): m = 0 for token in doc: vector_id = token.vocab.vectors.find(key=token.orth) if", "0 m += 1 if m >= steps: break return X, labels def", "np.zeros((len(labels), steps), dtype='int32') for n, doc in enumerate(docs): m = 0 for token", "docs, labels, steps): X = np.zeros((len(labels), steps), dtype='int32') for n, doc in enumerate(docs):", "in doc: vector_id = token.vocab.vectors.find(key=token.orth) if vector_id >= 0: X[n, m] = vector_id", "unicode_literals import spacy import numpy as np class SpPipe(): def __init__(self): self.nlp =", "= np.zeros((len(labels), steps), dtype='int32') for n, doc in enumerate(docs): m = 0 for", "for text in texts] X, y = self._get_features(docs, labels, steps) return X, y", "vector_id >= 0: X[n, m] = vector_id else: X[n, m] = 0 m", "= vector_id else: X[n, m] = 0 m += 1 if m >=", "texts, labels, steps=10): print 'tokenizing with spacy...' docs = [self.nlp(unicode(text)) for text in", "numpy as np class SpPipe(): def __init__(self): self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self,", "= token.vocab.vectors.find(key=token.orth) if vector_id >= 0: X[n, m] = vector_id else: X[n, m]", "<NAME> from __future__ import unicode_literals import spacy import numpy as np class SpPipe():", "= 0 m += 1 if m >= steps: break return X, labels", "text in texts] X, y = self._get_features(docs, labels, steps) return X, y def", "= 0 for token in doc: vector_id = token.vocab.vectors.find(key=token.orth) if vector_id >= 0:", "X, y def _get_features(self, docs, labels, steps): X = np.zeros((len(labels), steps), dtype='int32') for", "return X, y def _get_features(self, docs, labels, steps): X = np.zeros((len(labels), steps), dtype='int32')", "Author: <NAME> from __future__ import unicode_literals import spacy import numpy as np class", "0 for token in doc: vector_id = token.vocab.vectors.find(key=token.orth) if vector_id >= 0: X[n,", "if vector_id >= 0: 
X[n, m] = vector_id else: X[n, m] = 0", "spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self, texts, labels, steps=10): print 'tokenizing with spacy...' docs =", "X[n, m] = 0 m += 1 if m >= steps: break return", "steps): X = np.zeros((len(labels), steps), dtype='int32') for n, doc in enumerate(docs): m =", "with spacy...' docs = [self.nlp(unicode(text)) for text in texts] X, y = self._get_features(docs,", "1 if m >= steps: break return X, labels def get_embedding(self): return self.nlp.vocab.vectors.data", "spacy...' docs = [self.nlp(unicode(text)) for text in texts] X, y = self._get_features(docs, labels,", ">= 0: X[n, m] = vector_id else: X[n, m] = 0 m +=", "enumerate(docs): m = 0 for token in doc: vector_id = token.vocab.vectors.find(key=token.orth) if vector_id", "np class SpPipe(): def __init__(self): self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self, texts, labels,", "def _get_features(self, docs, labels, steps): X = np.zeros((len(labels), steps), dtype='int32') for n, doc", "<gh_stars>0 # Author: <NAME> from __future__ import unicode_literals import spacy import numpy as", "SpPipe(): def __init__(self): self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self, texts, labels, steps=10): print", "__call__(self, texts, labels, steps=10): print 'tokenizing with spacy...' docs = [self.nlp(unicode(text)) for text", "steps=10): print 'tokenizing with spacy...' 
docs = [self.nlp(unicode(text)) for text in texts] X,", "vector_id else: X[n, m] = 0 m += 1 if m >= steps:", "self._get_features(docs, labels, steps) return X, y def _get_features(self, docs, labels, steps): X =", "in texts] X, y = self._get_features(docs, labels, steps) return X, y def _get_features(self,", "class SpPipe(): def __init__(self): self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self, texts, labels, steps=10):", "labels, steps) return X, y def _get_features(self, docs, labels, steps): X = np.zeros((len(labels),", "labels, steps): X = np.zeros((len(labels), steps), dtype='int32') for n, doc in enumerate(docs): m", "steps) return X, y def _get_features(self, docs, labels, steps): X = np.zeros((len(labels), steps),", "token in doc: vector_id = token.vocab.vectors.find(key=token.orth) if vector_id >= 0: X[n, m] =", "= [self.nlp(unicode(text)) for text in texts] X, y = self._get_features(docs, labels, steps) return", "as np class SpPipe(): def __init__(self): self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self, texts,", "X[n, m] = vector_id else: X[n, m] = 0 m += 1 if", "from __future__ import unicode_literals import spacy import numpy as np class SpPipe(): def", "doc: vector_id = token.vocab.vectors.find(key=token.orth) if vector_id >= 0: X[n, m] = vector_id else:", "= spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self, texts, labels, steps=10): print 'tokenizing with spacy...' docs", "y = self._get_features(docs, labels, steps) return X, y def _get_features(self, docs, labels, steps):", "labels, steps=10): print 'tokenizing with spacy...' 
docs = [self.nlp(unicode(text)) for text in texts]", "self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self, texts, labels, steps=10): print 'tokenizing with spacy...'", "def __call__(self, texts, labels, steps=10): print 'tokenizing with spacy...' docs = [self.nlp(unicode(text)) for", "token.vocab.vectors.find(key=token.orth) if vector_id >= 0: X[n, m] = vector_id else: X[n, m] =", "n, doc in enumerate(docs): m = 0 for token in doc: vector_id =", "for n, doc in enumerate(docs): m = 0 for token in doc: vector_id", "m += 1 if m >= steps: break return X, labels def get_embedding(self):", "for token in doc: vector_id = token.vocab.vectors.find(key=token.orth) if vector_id >= 0: X[n, m]", "steps), dtype='int32') for n, doc in enumerate(docs): m = 0 for token in", "import unicode_literals import spacy import numpy as np class SpPipe(): def __init__(self): self.nlp", "__future__ import unicode_literals import spacy import numpy as np class SpPipe(): def __init__(self):", "'tokenizing with spacy...' 
docs = [self.nlp(unicode(text)) for text in texts] X, y =", "X = np.zeros((len(labels), steps), dtype='int32') for n, doc in enumerate(docs): m = 0", "texts] X, y = self._get_features(docs, labels, steps) return X, y def _get_features(self, docs,", "import numpy as np class SpPipe(): def __init__(self): self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def", "y def _get_features(self, docs, labels, steps): X = np.zeros((len(labels), steps), dtype='int32') for n,", "dtype='int32') for n, doc in enumerate(docs): m = 0 for token in doc:", "m] = vector_id else: X[n, m] = 0 m += 1 if m", "doc in enumerate(docs): m = 0 for token in doc: vector_id = token.vocab.vectors.find(key=token.orth)", "_get_features(self, docs, labels, steps): X = np.zeros((len(labels), steps), dtype='int32') for n, doc in", "__init__(self): self.nlp = spacy.load('en_core_web_md', disable=['ner','parser','tagger','textcat']) def __call__(self, texts, labels, steps=10): print 'tokenizing with", "vector_id = token.vocab.vectors.find(key=token.orth) if vector_id >= 0: X[n, m] = vector_id else: X[n,", "print 'tokenizing with spacy...' docs = [self.nlp(unicode(text)) for text in texts] X, y" ]
[ "app import database def get_single_json_entity(entity_query): query_result_proxy = database.session.execute(entity_query) database.session.commit() row_proxies = [r for", "1: json_entity = {k: v for k, v in row_proxies[0].items()} else: json_entity =", "query_result_proxy = database.session.execute(entity_query) database.session.commit() row_proxies = [r for r in query_result_proxy] if len(row_proxies)", "import database def get_single_json_entity(entity_query): query_result_proxy = database.session.execute(entity_query) database.session.commit() row_proxies = [r for r", "database.session.execute(entity_query) database.session.commit() row_proxies = [r for r in query_result_proxy] if len(row_proxies) == 1:", "= {k: v for k, v in row_proxies[0].items()} else: json_entity = {} return", "== 1: json_entity = {k: v for k, v in row_proxies[0].items()} else: json_entity", "database.session.commit() row_proxies = [r for r in query_result_proxy] if len(row_proxies) == 1: json_entity", "database def get_single_json_entity(entity_query): query_result_proxy = database.session.execute(entity_query) database.session.commit() row_proxies = [r for r in", "{k: v for k, v in row_proxies[0].items()} else: json_entity = {} return json_entity", "def get_single_json_entity(entity_query): query_result_proxy = database.session.execute(entity_query) database.session.commit() row_proxies = [r for r in query_result_proxy]", "json_entity = {k: v for k, v in row_proxies[0].items()} else: json_entity = {}", "[r for r in query_result_proxy] if len(row_proxies) == 1: json_entity = {k: v", "= [r for r in query_result_proxy] if len(row_proxies) == 1: json_entity = {k:", "row_proxies = [r for r in query_result_proxy] if len(row_proxies) == 1: json_entity =", "for r in query_result_proxy] if len(row_proxies) == 1: json_entity = {k: v for", "if len(row_proxies) == 1: json_entity = {k: v for k, v in row_proxies[0].items()}", "= database.session.execute(entity_query) database.session.commit() 
row_proxies = [r for r in query_result_proxy] if len(row_proxies) ==", "query_result_proxy] if len(row_proxies) == 1: json_entity = {k: v for k, v in", "r in query_result_proxy] if len(row_proxies) == 1: json_entity = {k: v for k,", "in query_result_proxy] if len(row_proxies) == 1: json_entity = {k: v for k, v", "len(row_proxies) == 1: json_entity = {k: v for k, v in row_proxies[0].items()} else:", "from app import database def get_single_json_entity(entity_query): query_result_proxy = database.session.execute(entity_query) database.session.commit() row_proxies = [r", "get_single_json_entity(entity_query): query_result_proxy = database.session.execute(entity_query) database.session.commit() row_proxies = [r for r in query_result_proxy] if" ]
[ "= \"http://127.0.0.1:5000/\" url = \"http://localhost:8080/\" raw_response = requests.get(url=url, auth=('api_username', 'api_password')) if raw_response.status_code ==", "auth=('api_username', 'api_password')) if raw_response.status_code == 200: result = raw_response.json() print(result) else: print(str(raw_response.status_code) +", "= raw_response.json() print(result) else: print(str(raw_response.status_code) + \" - \" + raw_response.text) if __name__", "200: result = raw_response.json() print(result) else: print(str(raw_response.status_code) + \" - \" + raw_response.text)", "def main(): # url = \"http://127.0.0.1:5000/\" url = \"http://localhost:8080/\" raw_response = requests.get(url=url, auth=('api_username',", "requests def main(): # url = \"http://127.0.0.1:5000/\" url = \"http://localhost:8080/\" raw_response = requests.get(url=url,", "raw_response.json() print(result) else: print(str(raw_response.status_code) + \" - \" + raw_response.text) if __name__ ==", "requests.get(url=url, auth=('api_username', 'api_password')) if raw_response.status_code == 200: result = raw_response.json() print(result) else: print(str(raw_response.status_code)", "main(): # url = \"http://127.0.0.1:5000/\" url = \"http://localhost:8080/\" raw_response = requests.get(url=url, auth=('api_username', 'api_password'))", "= requests.get(url=url, auth=('api_username', 'api_password')) if raw_response.status_code == 200: result = raw_response.json() print(result) else:", "'api_password')) if raw_response.status_code == 200: result = raw_response.json() print(result) else: print(str(raw_response.status_code) + \"", "raw_response.status_code == 200: result = raw_response.json() print(result) else: print(str(raw_response.status_code) + \" - \"", "import requests def main(): # url = \"http://127.0.0.1:5000/\" url = \"http://localhost:8080/\" raw_response =", "print(result) else: print(str(raw_response.status_code) + \" - \" + raw_response.text) if __name__ == \"__main__\":", "raw_response = 
requests.get(url=url, auth=('api_username', 'api_password')) if raw_response.status_code == 200: result = raw_response.json() print(result)", "== 200: result = raw_response.json() print(result) else: print(str(raw_response.status_code) + \" - \" +", "else: print(str(raw_response.status_code) + \" - \" + raw_response.text) if __name__ == \"__main__\": main()", "\"http://localhost:8080/\" raw_response = requests.get(url=url, auth=('api_username', 'api_password')) if raw_response.status_code == 200: result = raw_response.json()", "<filename>test.py import requests def main(): # url = \"http://127.0.0.1:5000/\" url = \"http://localhost:8080/\" raw_response", "url = \"http://localhost:8080/\" raw_response = requests.get(url=url, auth=('api_username', 'api_password')) if raw_response.status_code == 200: result", "if raw_response.status_code == 200: result = raw_response.json() print(result) else: print(str(raw_response.status_code) + \" -", "= \"http://localhost:8080/\" raw_response = requests.get(url=url, auth=('api_username', 'api_password')) if raw_response.status_code == 200: result =", "# url = \"http://127.0.0.1:5000/\" url = \"http://localhost:8080/\" raw_response = requests.get(url=url, auth=('api_username', 'api_password')) if", "url = \"http://127.0.0.1:5000/\" url = \"http://localhost:8080/\" raw_response = requests.get(url=url, auth=('api_username', 'api_password')) if raw_response.status_code", "result = raw_response.json() print(result) else: print(str(raw_response.status_code) + \" - \" + raw_response.text) if", "\"http://127.0.0.1:5000/\" url = \"http://localhost:8080/\" raw_response = requests.get(url=url, auth=('api_username', 'api_password')) if raw_response.status_code == 200:" ]
[ "import test_lib _mrea_path_p1 = \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\" _mrea_path_p2 = \"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\" @pytest.fixture(name=\"p1_mrea_path\") def _p1_mrea_path(prime1_pwe_project) -> Path:", "prime2_pwe_project.joinpath(_mrea_path_p2) def test_compare_p1(p1_mrea_path): # Known difference: some Prime 1 script layers have sizes", "mlvl = prime2_asset_manager.get_parsed_asset(0x42b935e4, type_hint=Mlvl) area = mlvl.get_area(0x5DFA984F) area.get_layer(\"Default\").add_instance_with(SpecialFunction( function=echoes.Function.Darkworld, )) assert area.mrea.build() is", "32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path) def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES,", "test_compare_p1(p1_mrea_path): # Known difference: some Prime 1 script layers have sizes that #", "\"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\" @pytest.fixture(name=\"p1_mrea_path\") def _p1_mrea_path(prime1_pwe_project) -> Path: return prime1_pwe_project.joinpath(_mrea_path_p1) @pytest.fixture(name=\"p2_mrea_path\") def _p2_mrea_path(prime2_pwe_project) -> Path:", "<filename>test/formats/test_mrea.py from pathlib import Path import pytest from retro_data_structures.base_resource import AssetId from retro_data_structures.formats", "multiples of 32; building always pads to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path):", "from retro_data_structures.formats import Mlvl from retro_data_structures.formats.mrea import MREA, Mrea from retro_data_structures.game_check import Game", "from retro_data_structures.properties.echoes.objects.SpecialFunction import SpecialFunction from retro_data_structures.enums import echoes mlvl = 
prime2_asset_manager.get_parsed_asset(0x42b935e4, type_hint=Mlvl) area", "Path: return prime1_pwe_project.joinpath(_mrea_path_p1) @pytest.fixture(name=\"p2_mrea_path\") def _p2_mrea_path(prime2_pwe_project) -> Path: return prime2_pwe_project.joinpath(_mrea_path_p2) def test_compare_p1(p1_mrea_path): #", "test import test_lib _mrea_path_p1 = \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\" _mrea_path_p2 = \"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\" @pytest.fixture(name=\"p1_mrea_path\") def _p1_mrea_path(prime1_pwe_project) ->", "prime1_pwe_project.joinpath(_mrea_path_p1) @pytest.fixture(name=\"p2_mrea_path\") def _p2_mrea_path(prime2_pwe_project) -> Path: return prime2_pwe_project.joinpath(_mrea_path_p2) def test_compare_p1(p1_mrea_path): # Known difference:", "building always pads to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path)", "Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path) def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path) def", "= \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\" _mrea_path_p2 = \"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\" @pytest.fixture(name=\"p1_mrea_path\") def _p1_mrea_path(prime1_pwe_project) -> Path: return prime1_pwe_project.joinpath(_mrea_path_p1) @pytest.fixture(name=\"p2_mrea_path\")", "return prime2_pwe_project.joinpath(_mrea_path_p2) def test_compare_p1(p1_mrea_path): # Known difference: some Prime 1 script layers have", "pads to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path) def test_compare_p2(p2_mrea_path):", 
"test_compare_all_p2(prime2_asset_manager, mrea_asset_id: AssetId): resource, decoded, encoded = test_lib.parse_and_build_compare_from_manager( prime2_asset_manager, mrea_asset_id, Mrea, ) assert", "not multiples of 32; building always pads to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def", "of 32; building always pads to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA,", "prime2_asset_manager, mrea_asset_id, Mrea, ) assert isinstance(decoded, Mrea) def test_add_instance(prime2_asset_manager): from retro_data_structures.properties.echoes.objects.SpecialFunction import SpecialFunction", "Mrea from retro_data_structures.game_check import Game from test import test_lib _mrea_path_p1 = \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\" _mrea_path_p2", "32; building always pads to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME,", "pathlib import Path import pytest from retro_data_structures.base_resource import AssetId from retro_data_structures.formats import Mlvl", "p1_mrea_path) def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path) def test_compare_all_p2(prime2_asset_manager, mrea_asset_id: AssetId): resource, decoded, encoded", "import Game from test import test_lib _mrea_path_p1 = \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\" _mrea_path_p2 = \"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\" @pytest.fixture(name=\"p1_mrea_path\")", "isinstance(decoded, Mrea) def test_add_instance(prime2_asset_manager): from retro_data_structures.properties.echoes.objects.SpecialFunction import SpecialFunction from retro_data_structures.enums import echoes mlvl", "= prime2_asset_manager.get_parsed_asset(0x42b935e4, 
type_hint=Mlvl) area = mlvl.get_area(0x5DFA984F) area.get_layer(\"Default\").add_instance_with(SpecialFunction( function=echoes.Function.Darkworld, )) assert area.mrea.build() is not", "test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path) def test_compare_all_p2(prime2_asset_manager, mrea_asset_id: AssetId): resource, decoded, encoded = test_lib.parse_and_build_compare_from_manager(", "-> Path: return prime2_pwe_project.joinpath(_mrea_path_p2) def test_compare_p1(p1_mrea_path): # Known difference: some Prime 1 script", "some Prime 1 script layers have sizes that # are not multiples of", "import echoes mlvl = prime2_asset_manager.get_parsed_asset(0x42b935e4, type_hint=Mlvl) area = mlvl.get_area(0x5DFA984F) area.get_layer(\"Default\").add_instance_with(SpecialFunction( function=echoes.Function.Darkworld, )) assert", "assert isinstance(decoded, Mrea) def test_add_instance(prime2_asset_manager): from retro_data_structures.properties.echoes.objects.SpecialFunction import SpecialFunction from retro_data_structures.enums import echoes", "Game.PRIME, p1_mrea_path) def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path) def test_compare_all_p2(prime2_asset_manager, mrea_asset_id: AssetId): resource, decoded,", "p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path) def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path) def test_compare_all_p2(prime2_asset_manager,", "def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path) def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path) def test_compare_all_p2(prime2_asset_manager, mrea_asset_id:", "SpecialFunction from retro_data_structures.enums import echoes mlvl = 
prime2_asset_manager.get_parsed_asset(0x42b935e4, type_hint=Mlvl) area = mlvl.get_area(0x5DFA984F) area.get_layer(\"Default\").add_instance_with(SpecialFunction(", "def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path) def test_compare_all_p2(prime2_asset_manager, mrea_asset_id: AssetId): resource, decoded, encoded =", "are not multiples of 32; building always pads to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path)", "# Known difference: some Prime 1 script layers have sizes that # are", "test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path) def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path)", "test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path) def test_compare_all_p2(prime2_asset_manager, mrea_asset_id: AssetId): resource, decoded, encoded = test_lib.parse_and_build_compare_from_manager( prime2_asset_manager,", "pytest from retro_data_structures.base_resource import AssetId from retro_data_structures.formats import Mlvl from retro_data_structures.formats.mrea import MREA,", "def _p2_mrea_path(prime2_pwe_project) -> Path: return prime2_pwe_project.joinpath(_mrea_path_p2) def test_compare_p1(p1_mrea_path): # Known difference: some Prime", "sizes that # are not multiples of 32; building always pads to 32", "Known difference: some Prime 1 script layers have sizes that # are not", "1 script layers have sizes that # are not multiples of 32; building", "to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path) def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA,", "retro_data_structures.formats.mrea import MREA, Mrea from 
retro_data_structures.game_check import Game from test import test_lib _mrea_path_p1", "Mlvl from retro_data_structures.formats.mrea import MREA, Mrea from retro_data_structures.game_check import Game from test import", "def test_compare_p1(p1_mrea_path): # Known difference: some Prime 1 script layers have sizes that", "Mrea) def test_add_instance(prime2_asset_manager): from retro_data_structures.properties.echoes.objects.SpecialFunction import SpecialFunction from retro_data_structures.enums import echoes mlvl =", "always pads to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path) def test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path) def", "= \"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\" @pytest.fixture(name=\"p1_mrea_path\") def _p1_mrea_path(prime1_pwe_project) -> Path: return prime1_pwe_project.joinpath(_mrea_path_p1) @pytest.fixture(name=\"p2_mrea_path\") def _p2_mrea_path(prime2_pwe_project) ->", "test_lib _mrea_path_p1 = \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\" _mrea_path_p2 = \"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\" @pytest.fixture(name=\"p1_mrea_path\") def _p1_mrea_path(prime1_pwe_project) -> Path: return", "retro_data_structures.properties.echoes.objects.SpecialFunction import SpecialFunction from retro_data_structures.enums import echoes mlvl = prime2_asset_manager.get_parsed_asset(0x42b935e4, type_hint=Mlvl) area =", "import AssetId from retro_data_structures.formats import Mlvl from retro_data_structures.formats.mrea import MREA, Mrea from retro_data_structures.game_check", "# are not multiples of 32; building always pads to 32 test_lib.parse_and_build_compare(MREA, Game.PRIME,", "@pytest.fixture(name=\"p1_mrea_path\") def _p1_mrea_path(prime1_pwe_project) -> Path: return prime1_pwe_project.joinpath(_mrea_path_p1) @pytest.fixture(name=\"p2_mrea_path\") def 
_p2_mrea_path(prime2_pwe_project) -> Path: return", "import MREA, Mrea from retro_data_structures.game_check import Game from test import test_lib _mrea_path_p1 =", "Game from test import test_lib _mrea_path_p1 = \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\" _mrea_path_p2 = \"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\" @pytest.fixture(name=\"p1_mrea_path\") def", "retro_data_structures.formats import Mlvl from retro_data_structures.formats.mrea import MREA, Mrea from retro_data_structures.game_check import Game from", "def _p1_mrea_path(prime1_pwe_project) -> Path: return prime1_pwe_project.joinpath(_mrea_path_p1) @pytest.fixture(name=\"p2_mrea_path\") def _p2_mrea_path(prime2_pwe_project) -> Path: return prime2_pwe_project.joinpath(_mrea_path_p2)", "p2_mrea_path) def test_compare_all_p2(prime2_asset_manager, mrea_asset_id: AssetId): resource, decoded, encoded = test_lib.parse_and_build_compare_from_manager( prime2_asset_manager, mrea_asset_id, Mrea,", "retro_data_structures.game_check import Game from test import test_lib _mrea_path_p1 = \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\" _mrea_path_p2 = \"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\"", "AssetId): resource, decoded, encoded = test_lib.parse_and_build_compare_from_manager( prime2_asset_manager, mrea_asset_id, Mrea, ) assert isinstance(decoded, Mrea)", "test_lib.parse_and_build_compare_from_manager( prime2_asset_manager, mrea_asset_id, Mrea, ) assert isinstance(decoded, Mrea) def test_add_instance(prime2_asset_manager): from retro_data_structures.properties.echoes.objects.SpecialFunction import", "_mrea_path_p1 = \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\" _mrea_path_p2 = \"Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA\" @pytest.fixture(name=\"p1_mrea_path\") def _p1_mrea_path(prime1_pwe_project) -> Path: return prime1_pwe_project.joinpath(_mrea_path_p1)", 
"from retro_data_structures.enums import echoes mlvl = prime2_asset_manager.get_parsed_asset(0x42b935e4, type_hint=Mlvl) area = mlvl.get_area(0x5DFA984F) area.get_layer(\"Default\").add_instance_with(SpecialFunction( function=echoes.Function.Darkworld,", "MREA, Mrea from retro_data_structures.game_check import Game from test import test_lib _mrea_path_p1 = \"Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA\"", "return prime1_pwe_project.joinpath(_mrea_path_p1) @pytest.fixture(name=\"p2_mrea_path\") def _p2_mrea_path(prime2_pwe_project) -> Path: return prime2_pwe_project.joinpath(_mrea_path_p2) def test_compare_p1(p1_mrea_path): # Known", "mrea_asset_id, Mrea, ) assert isinstance(decoded, Mrea) def test_add_instance(prime2_asset_manager): from retro_data_structures.properties.echoes.objects.SpecialFunction import SpecialFunction from", "AssetId from retro_data_structures.formats import Mlvl from retro_data_structures.formats.mrea import MREA, Mrea from retro_data_structures.game_check import", "test_add_instance(prime2_asset_manager): from retro_data_structures.properties.echoes.objects.SpecialFunction import SpecialFunction from retro_data_structures.enums import echoes mlvl = prime2_asset_manager.get_parsed_asset(0x42b935e4, type_hint=Mlvl)", "import Mlvl from retro_data_structures.formats.mrea import MREA, Mrea from retro_data_structures.game_check import Game from test", "from retro_data_structures.formats.mrea import MREA, Mrea from retro_data_structures.game_check import Game from test import test_lib", "have sizes that # are not multiples of 32; building always pads to", "test_compare_p1_parsed(p1_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path) def test_compare_p2(p2_mrea_path): test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path) def test_compare_all_p2(prime2_asset_manager, mrea_asset_id: AssetId):", "layers have sizes that # are not multiples of 32; building always pads", 
from pathlib import Path

import pytest

from retro_data_structures.base_resource import AssetId
from retro_data_structures.formats import Mlvl
from retro_data_structures.formats.mrea import MREA, Mrea
from retro_data_structures.game_check import Game
from test import test_lib

_mrea_path_p1 = "Resources/Worlds/EndCinema/!EndCinema_Master/01_endcinema.MREA"
_mrea_path_p2 = "Resources/Worlds/SandWorld/!SandWorld_Master/00_pickup_sand_d_dark.MREA"


@pytest.fixture(name="p1_mrea_path")
def _p1_mrea_path(prime1_pwe_project) -> Path:
    """Path of the Prime 1 sample MREA inside the extracted PWE project."""
    return prime1_pwe_project.joinpath(_mrea_path_p1)


@pytest.fixture(name="p2_mrea_path")
def _p2_mrea_path(prime2_pwe_project) -> Path:
    """Path of the Echoes sample MREA inside the extracted PWE project."""
    return prime2_pwe_project.joinpath(_mrea_path_p2)


def test_compare_p1(p1_mrea_path):
    """Round-trip the Prime 1 sample MREA, raw and parsed."""
    # Known difference: some Prime 1 script layers have sizes that
    # are not multiples of 32; building always pads to 32
    test_lib.parse_and_build_compare(MREA, Game.PRIME, p1_mrea_path)
    test_lib.parse_and_build_compare_parsed(MREA, Game.PRIME, p1_mrea_path)


def test_compare_p2(p2_mrea_path):
    """Round-trip the Echoes sample MREA through parse and build."""
    test_lib.parse_and_build_compare_parsed(MREA, Game.ECHOES, p2_mrea_path)


def test_compare_all_p2(prime2_asset_manager, mrea_asset_id: AssetId):
    """Each Echoes MREA known to the asset manager decodes to a Mrea."""
    _resource, decoded, _encoded = test_lib.parse_and_build_compare_from_manager(
        prime2_asset_manager,
        mrea_asset_id,
        Mrea,
    )
    assert isinstance(decoded, Mrea)


def test_add_instance(prime2_asset_manager):
    """Adding a SpecialFunction to a layer keeps the area buildable."""
    from retro_data_structures.properties.echoes.objects.SpecialFunction import SpecialFunction
    from retro_data_structures.enums import echoes

    mlvl = prime2_asset_manager.get_parsed_asset(0x42b935e4, type_hint=Mlvl)
    area = mlvl.get_area(0x5DFA984F)
    default_layer = area.get_layer("Default")
    default_layer.add_instance_with(SpecialFunction(
        function=echoes.Function.Darkworld,
    ))
    assert area.mrea.build() is not None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import os
import sys


def resource_file_to_bytecode(input_dir, input_file, output_path):
    """Embed a binary resource file into a generated C source file.

    Reads ``input_dir/input_file`` as raw bytes and overwrites
    ``output_path`` with a C translation unit defining
    ``const uint8_t _binary_<name>_start[<size>]`` holding the file
    contents and ``const uint8_t* _binary_<name>_end`` pointing one past
    the array, where ``<name>`` is the file name with '.' replaced by '_'.

    :param input_dir: directory containing the resource file
    :param input_file: resource file name, also used to derive symbol names
    :param output_path: path of the C file to generate (fully rewritten)
    """
    with open(os.path.join(input_dir, input_file), 'rb') as resource_file_object:
        all_the_content = resource_file_object.read()

    # e.g. b'\x01\xff' -> "0x1,0xff"; len() replaces the old manual counter.
    byte_code = ",".join(hex(byte) for byte in all_the_content)
    length = len(all_the_content)
    # '.' is not valid in a C identifier, so file.ext -> file_ext.
    symbol = input_file.replace(".", "_")

    template0 = "#include <stdint.h>\n"
    template1 = ("const uint8_t _binary_$1_start[$2] = {$3};\n"
                 .replace("$1", symbol)
                 .replace("$2", str(length))
                 .replace("$3", byte_code))
    template2 = ("const uint8_t* _binary_$1_end = _binary_$1_start + $2;"
                 .replace("$1", symbol)
                 .replace("$2", str(length)))

    # Open in 'w' so the file is truncated up front, replacing the old
    # open-for-append + seek(0) + truncate() sequence with the same result.
    with open(output_path, 'w') as cpp_file_object:
        cpp_file_object.write(template0 + template1 + template2)
def main(argv=None):
    """Command-line entry point: convert --input into a C array at --output.

    :param argv: argument list to parse; ``None`` (the default) keeps the
                 original behavior of reading ``sys.argv[1:]``. The
                 parameter is a backward-compatible addition for testability.
    :return: 0 on success, suitable for ``sys.exit``.
    """
    parser = argparse.ArgumentParser()
    # --objcopy and --arch are accepted for build-system compatibility
    # but are not used by this script.
    parser.add_argument('--objcopy', type=str, required=False)
    parser.add_argument('--input', type=str, required=True)
    parser.add_argument('--output', type=str, required=True)
    parser.add_argument('--arch', type=str, required=False)
    args = parser.parse_args(argv)

    input_dir, input_file = os.path.split(args.input)
    output_path = os.path.abspath(args.output)
    resource_file_to_bytecode(input_dir, input_file, output_path)
    return 0


if __name__ == '__main__':
    sys.exit(main())
[ "with else throw AttributeError: 'tuple' object has no attribute 'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\"", "\"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 } SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN =", "SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN = False SQLALCHEMY_AUTOFLUSH = False SQLALCHEMY_ECHO = True REDIS_URL", "= False SQLALCHEMY_COMMIT_ON_TEARDOWN = False SQLALCHEMY_AUTOFLUSH = False SQLALCHEMY_ECHO = True REDIS_URL =", "# encoding: utf-8 import os basedir = os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir DEBUG =", "# web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 } SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN = False", "\"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 } SQLALCHEMY_TRACK_MODIFICATIONS", "SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS = { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website':", "BASEDIR = basedir DEBUG = False SECRET_KEY = 'This is a secret key", "SECRET_KEY = 'This is a secret key forexample' # not end with else", "'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 } SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN", "import os basedir = os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir DEBUG = False SECRET_KEY =", "python # 
encoding: utf-8 import os basedir = os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir DEBUG", "# web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 } SQLALCHEMY_TRACK_MODIFICATIONS =", "#!/usr/bin/env python # encoding: utf-8 import os basedir = os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir", "encoding: utf-8 import os basedir = os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir DEBUG = False", "basedir = os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir DEBUG = False SECRET_KEY = 'This is", "web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 } SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN = False SQLALCHEMY_AUTOFLUSH", "# other管理 } SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN = False SQLALCHEMY_AUTOFLUSH = False SQLALCHEMY_ECHO", "utf-8 import os basedir = os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir DEBUG = False SECRET_KEY", "no attribute 'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS = { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\",", "= False SECRET_KEY = 'This is a secret key forexample' # not end", "\"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS = { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", #", "'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 }", "'This is a secret key forexample' 
# not end with else throw AttributeError:", "'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS = { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库", "base管理 SQLALCHEMY_BINDS = { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb':", "key forexample' # not end with else throw AttributeError: 'tuple' object has no", "# base管理 SQLALCHEMY_BINDS = { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库", "a secret key forexample' # not end with else throw AttributeError: 'tuple' object", "= { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", #", "forexample' # not end with else throw AttributeError: 'tuple' object has no attribute", "# not end with else throw AttributeError: 'tuple' object has no attribute 'drivername'", "attribute 'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS = { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", #", "not end with else throw AttributeError: 'tuple' object has no attribute 'drivername' SQLALCHEMY_DATABASE_URI", "basedir DEBUG = False SECRET_KEY = 'This is a secret key forexample' #", "} SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN = False SQLALCHEMY_AUTOFLUSH = False SQLALCHEMY_ECHO = True", "end with else throw AttributeError: 'tuple' object has no attribute 'drivername' SQLALCHEMY_DATABASE_URI =", "else throw AttributeError: 'tuple' object 
has no attribute 'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" #", "'tuple' object has no attribute 'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS =", "DEBUG = False SECRET_KEY = 'This is a secret key forexample' # not", "{ 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理", "other管理 } SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN = False SQLALCHEMY_AUTOFLUSH = False SQLALCHEMY_ECHO =", "<filename>config.py #!/usr/bin/env python # encoding: utf-8 import os basedir = os.path.abspath(os.path.dirname(__file__)) BASEDIR =", "SQLALCHEMY_BINDS = { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\",", "'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 } SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN = False SQLALCHEMY_AUTOFLUSH =", "= 'This is a secret key forexample' # not end with else throw", "os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir DEBUG = False SECRET_KEY = 'This is a secret", "= os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir DEBUG = False SECRET_KEY = 'This is a", "AttributeError: 'tuple' object has no attribute 'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS", "os basedir = os.path.abspath(os.path.dirname(__file__)) BASEDIR = basedir DEBUG = False SECRET_KEY = 'This", "is a secret key forexample' # not end with 
else throw AttributeError: 'tuple'", "= \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS = { 'base': \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\", # web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\",", "web数据库 'website': \"mysql+pymysql://root:rootpassword@127.0.0.1/website?charset=utf8\", # web数据库 'otherdb': \"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 } SQLALCHEMY_TRACK_MODIFICATIONS = False", "\"mysql+pymysql://root:rootpassword@127.0.0.1/otherdb?charset=utf8\", # other管理 } SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_COMMIT_ON_TEARDOWN = False SQLALCHEMY_AUTOFLUSH = False", "= basedir DEBUG = False SECRET_KEY = 'This is a secret key forexample'", "object has no attribute 'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS = {", "False SQLALCHEMY_COMMIT_ON_TEARDOWN = False SQLALCHEMY_AUTOFLUSH = False SQLALCHEMY_ECHO = True REDIS_URL = 'redis://:@127.0.0.1:6379'", "secret key forexample' # not end with else throw AttributeError: 'tuple' object has", "throw AttributeError: 'tuple' object has no attribute 'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理", "False SECRET_KEY = 'This is a secret key forexample' # not end with", "has no attribute 'drivername' SQLALCHEMY_DATABASE_URI = \"mysql+pymysql://root:rootpassword@127.0.0.1/test?charset=utf8\" # base管理 SQLALCHEMY_BINDS = { 'base':" ]
[ "\"\"\"Dependency injection example, cars module.\"\"\" class Car: \"\"\"Example car.\"\"\" def __init__(self, engine): \"\"\"Initialize", "module.\"\"\" class Car: \"\"\"Example car.\"\"\" def __init__(self, engine): \"\"\"Initialize instance.\"\"\" self._engine = engine", "Car: \"\"\"Example car.\"\"\" def __init__(self, engine): \"\"\"Initialize instance.\"\"\" self._engine = engine # Engine", "example, cars module.\"\"\" class Car: \"\"\"Example car.\"\"\" def __init__(self, engine): \"\"\"Initialize instance.\"\"\" self._engine", "class Car: \"\"\"Example car.\"\"\" def __init__(self, engine): \"\"\"Initialize instance.\"\"\" self._engine = engine #", "car.\"\"\" def __init__(self, engine): \"\"\"Initialize instance.\"\"\" self._engine = engine # Engine is injected", "injection example, cars module.\"\"\" class Car: \"\"\"Example car.\"\"\" def __init__(self, engine): \"\"\"Initialize instance.\"\"\"", "\"\"\"Example car.\"\"\" def __init__(self, engine): \"\"\"Initialize instance.\"\"\" self._engine = engine # Engine is", "cars module.\"\"\" class Car: \"\"\"Example car.\"\"\" def __init__(self, engine): \"\"\"Initialize instance.\"\"\" self._engine =" ]
[ "raise NotImplementedError id2node = dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings =", "and not self.t1: emb = self.emb_1 self.t1 = True elif G.number_of_nodes() == self.emb_2.shape[0]", "FromNumpyGraph(FromNumpy): def train(self, G): assert G is None return self.emb class FromNumpyAlign(object): def", "random import networkx as nx import numpy as np class Zero(object): def __init__(self,", "<reponame>S-HuaBomb/Contrib import random import networkx as nx import numpy as np class Zero(object):", "import networkx as nx import numpy as np class Zero(object): def __init__(self, hidden_size,", "self.emb_1 self.t1 = True elif G.number_of_nodes() == self.emb_2.shape[0] and not self.t2: emb =", "G): return np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object): def __init__(self, hidden_size, emb_path, **kwargs): super(FromNumpy, self).__init__()", "emb = self.emb_2 self.t2 = True else: raise NotImplementedError id2node = dict([(vid, node)", "super(FromNumpy, self).__init__() self.hidden_size = hidden_size self.emb = np.load(emb_path) def train(self, G): id2node =", "np.asarray([self.emb[id2node[i]] for i in range(len(id2node))]) assert G.number_of_nodes() == embeddings.shape[0] return embeddings class FromNumpyGraph(FromNumpy):", "def train(self, G): id2node = dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings", "self.emb_1.shape[0] and not self.t1: emb = self.emb_1 self.t1 = True elif G.number_of_nodes() ==", "= hidden_size self.emb_1 = np.load(emb_path_1) self.emb_2 = np.load(emb_path_2) self.t1, self.t2 = False, False", "return embeddings class FromNumpyGraph(FromNumpy): def train(self, G): assert G is None return self.emb", "train(self, G): id2node = dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings =", "node in enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]] for i in range(len(id2node))]) assert G.number_of_nodes() ==", "def train(self, G): return 
np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object): def __init__(self, hidden_size, emb_path, **kwargs):", "in range(len(id2node))]) assert G.number_of_nodes() == embeddings.shape[0] return embeddings class FromNumpyGraph(FromNumpy): def train(self, G):", "embeddings = np.asarray([self.emb[id2node[i]] for i in range(len(id2node))]) assert G.number_of_nodes() == embeddings.shape[0] return embeddings", "FromNumpyAlign(object): def __init__(self, hidden_size, emb_path_1, emb_path_2, **kwargs): self.hidden_size = hidden_size self.emb_1 = np.load(emb_path_1)", "= True else: raise NotImplementedError id2node = dict([(vid, node) for vid, node in", "def train(self, G): if G.number_of_nodes() == self.emb_1.shape[0] and not self.t1: emb = self.emb_1", "as np class Zero(object): def __init__(self, hidden_size, **kwargs): self.hidden_size = hidden_size def train(self,", "**kwargs): super(FromNumpy, self).__init__() self.hidden_size = hidden_size self.emb = np.load(emb_path) def train(self, G): id2node", "G.number_of_nodes() == self.emb_1.shape[0] and not self.t1: emb = self.emb_1 self.t1 = True elif", "nx import numpy as np class Zero(object): def __init__(self, hidden_size, **kwargs): self.hidden_size =", "emb_path_2, **kwargs): self.hidden_size = hidden_size self.emb_1 = np.load(emb_path_1) self.emb_2 = np.load(emb_path_2) self.t1, self.t2", "train(self, G): if G.number_of_nodes() == self.emb_1.shape[0] and not self.t1: emb = self.emb_1 self.t1", "not self.t1: emb = self.emb_1 self.t1 = True elif G.number_of_nodes() == self.emb_2.shape[0] and", "node) for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([emb[id2node[i]] for i in range(len(id2node))])", "self.hidden_size = hidden_size self.emb = np.load(emb_path) def train(self, G): id2node = dict([(vid, node)", "as nx import numpy as np class Zero(object): def __init__(self, hidden_size, **kwargs): self.hidden_size", "hidden_size, **kwargs): self.hidden_size = hidden_size def train(self, G): 
return np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object):", "if G.number_of_nodes() == self.emb_1.shape[0] and not self.t1: emb = self.emb_1 self.t1 = True", "self.t2 = True else: raise NotImplementedError id2node = dict([(vid, node) for vid, node", "__init__(self, hidden_size, emb_path, **kwargs): super(FromNumpy, self).__init__() self.hidden_size = hidden_size self.emb = np.load(emb_path) def", "for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([emb[id2node[i]] for i in range(len(id2node))]) return", "Zero(object): def __init__(self, hidden_size, **kwargs): self.hidden_size = hidden_size def train(self, G): return np.zeros((G.number_of_nodes(),", "= np.load(emb_path_1) self.emb_2 = np.load(emb_path_2) self.t1, self.t2 = False, False def train(self, G):", "for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]] for i in range(len(id2node))]) assert", "hidden_size, emb_path, **kwargs): super(FromNumpy, self).__init__() self.hidden_size = hidden_size self.emb = np.load(emb_path) def train(self,", "False, False def train(self, G): if G.number_of_nodes() == self.emb_1.shape[0] and not self.t1: emb", "else: raise NotImplementedError id2node = dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings", "**kwargs): self.hidden_size = hidden_size self.emb_1 = np.load(emb_path_1) self.emb_2 = np.load(emb_path_2) self.t1, self.t2 =", "hidden_size, emb_path_1, emb_path_2, **kwargs): self.hidden_size = hidden_size self.emb_1 = np.load(emb_path_1) self.emb_2 = np.load(emb_path_2)", "G): if G.number_of_nodes() == self.emb_1.shape[0] and not self.t1: emb = self.emb_1 self.t1 =", "None return self.emb class FromNumpyAlign(object): def __init__(self, hidden_size, emb_path_1, emb_path_2, **kwargs): self.hidden_size =", "__init__(self, hidden_size, **kwargs): self.hidden_size = hidden_size def train(self, G): return np.zeros((G.number_of_nodes(), self.hidden_size)) class", "assert G is None return 
self.emb class FromNumpyAlign(object): def __init__(self, hidden_size, emb_path_1, emb_path_2,", "True elif G.number_of_nodes() == self.emb_2.shape[0] and not self.t2: emb = self.emb_2 self.t2 =", "hidden_size self.emb = np.load(emb_path) def train(self, G): id2node = dict([(vid, node) for vid,", "False def train(self, G): if G.number_of_nodes() == self.emb_1.shape[0] and not self.t1: emb =", "= dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([emb[id2node[i]] for i", "G): id2node = dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]]", "def __init__(self, hidden_size, emb_path_1, emb_path_2, **kwargs): self.hidden_size = hidden_size self.emb_1 = np.load(emb_path_1) self.emb_2", "__init__(self, hidden_size, emb_path_1, emb_path_2, **kwargs): self.hidden_size = hidden_size self.emb_1 = np.load(emb_path_1) self.emb_2 =", "np.load(emb_path_2) self.t1, self.t2 = False, False def train(self, G): if G.number_of_nodes() == self.emb_1.shape[0]", "np.load(emb_path) def train(self, G): id2node = dict([(vid, node) for vid, node in enumerate(G.nodes())])", "not self.t2: emb = self.emb_2 self.t2 = True else: raise NotImplementedError id2node =", "embeddings.shape[0] return embeddings class FromNumpyGraph(FromNumpy): def train(self, G): assert G is None return", "class FromNumpyAlign(object): def __init__(self, hidden_size, emb_path_1, emb_path_2, **kwargs): self.hidden_size = hidden_size self.emb_1 =", "G is None return self.emb class FromNumpyAlign(object): def __init__(self, hidden_size, emb_path_1, emb_path_2, **kwargs):", "id2node = dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]] for", "self.hidden_size = hidden_size def train(self, G): return np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object): def __init__(self,", "id2node = dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings = 
np.asarray([emb[id2node[i]] for", "= dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]] for i", "= np.load(emb_path_2) self.t1, self.t2 = False, False def train(self, G): if G.number_of_nodes() ==", "networkx as nx import numpy as np class Zero(object): def __init__(self, hidden_size, **kwargs):", "class FromNumpy(object): def __init__(self, hidden_size, emb_path, **kwargs): super(FromNumpy, self).__init__() self.hidden_size = hidden_size self.emb", "elif G.number_of_nodes() == self.emb_2.shape[0] and not self.t2: emb = self.emb_2 self.t2 = True", "hidden_size self.emb_1 = np.load(emb_path_1) self.emb_2 = np.load(emb_path_2) self.t1, self.t2 = False, False def", "class Zero(object): def __init__(self, hidden_size, **kwargs): self.hidden_size = hidden_size def train(self, G): return", "assert G.number_of_nodes() == embeddings.shape[0] return embeddings class FromNumpyGraph(FromNumpy): def train(self, G): assert G", "dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([emb[id2node[i]] for i in", "train(self, G): assert G is None return self.emb class FromNumpyAlign(object): def __init__(self, hidden_size,", "in enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]] for i in range(len(id2node))]) assert G.number_of_nodes() == embeddings.shape[0]", "import numpy as np class Zero(object): def __init__(self, hidden_size, **kwargs): self.hidden_size = hidden_size", "and not self.t2: emb = self.emb_2 self.t2 = True else: raise NotImplementedError id2node", "emb = self.emb_1 self.t1 = True elif G.number_of_nodes() == self.emb_2.shape[0] and not self.t2:", "self.emb = np.load(emb_path) def train(self, G): id2node = dict([(vid, node) for vid, node", "**kwargs): self.hidden_size = hidden_size def train(self, G): return np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object): def", "= False, False def train(self, G): if G.number_of_nodes() == self.emb_1.shape[0] 
and not self.t1:", "True else: raise NotImplementedError id2node = dict([(vid, node) for vid, node in enumerate(G.nodes())])", "self.emb_2.shape[0] and not self.t2: emb = self.emb_2 self.t2 = True else: raise NotImplementedError", "is None return self.emb class FromNumpyAlign(object): def __init__(self, hidden_size, emb_path_1, emb_path_2, **kwargs): self.hidden_size", "embeddings class FromNumpyGraph(FromNumpy): def train(self, G): assert G is None return self.emb class", "dict([(vid, node) for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]] for i in", "class FromNumpyGraph(FromNumpy): def train(self, G): assert G is None return self.emb class FromNumpyAlign(object):", "np.load(emb_path_1) self.emb_2 = np.load(emb_path_2) self.t1, self.t2 = False, False def train(self, G): if", "i in range(len(id2node))]) assert G.number_of_nodes() == embeddings.shape[0] return embeddings class FromNumpyGraph(FromNumpy): def train(self,", "vid, node in enumerate(G.nodes())]) embeddings = np.asarray([emb[id2node[i]] for i in range(len(id2node))]) return embeddings", "self.emb_1 = np.load(emb_path_1) self.emb_2 = np.load(emb_path_2) self.t1, self.t2 = False, False def train(self,", "self.emb_2 = np.load(emb_path_2) self.t1, self.t2 = False, False def train(self, G): if G.number_of_nodes()", "G): assert G is None return self.emb class FromNumpyAlign(object): def __init__(self, hidden_size, emb_path_1,", "= self.emb_2 self.t2 = True else: raise NotImplementedError id2node = dict([(vid, node) for", "emb_path_1, emb_path_2, **kwargs): self.hidden_size = hidden_size self.emb_1 = np.load(emb_path_1) self.emb_2 = np.load(emb_path_2) self.t1,", "== embeddings.shape[0] return embeddings class FromNumpyGraph(FromNumpy): def train(self, G): assert G is None", "self.hidden_size = hidden_size self.emb_1 = np.load(emb_path_1) self.emb_2 = np.load(emb_path_2) self.t1, self.t2 = False,", "self.t2 = False, False def train(self, G): if G.number_of_nodes() == 
self.emb_1.shape[0] and not", "self.t1 = True elif G.number_of_nodes() == self.emb_2.shape[0] and not self.t2: emb = self.emb_2", "G.number_of_nodes() == embeddings.shape[0] return embeddings class FromNumpyGraph(FromNumpy): def train(self, G): assert G is", "self.emb_2 self.t2 = True else: raise NotImplementedError id2node = dict([(vid, node) for vid,", "= np.load(emb_path) def train(self, G): id2node = dict([(vid, node) for vid, node in", "== self.emb_2.shape[0] and not self.t2: emb = self.emb_2 self.t2 = True else: raise", "self.emb class FromNumpyAlign(object): def __init__(self, hidden_size, emb_path_1, emb_path_2, **kwargs): self.hidden_size = hidden_size self.emb_1", "emb_path, **kwargs): super(FromNumpy, self).__init__() self.hidden_size = hidden_size self.emb = np.load(emb_path) def train(self, G):", "vid, node in enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]] for i in range(len(id2node))]) assert G.number_of_nodes()", "return np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object): def __init__(self, hidden_size, emb_path, **kwargs): super(FromNumpy, self).__init__() self.hidden_size", "= np.asarray([self.emb[id2node[i]] for i in range(len(id2node))]) assert G.number_of_nodes() == embeddings.shape[0] return embeddings class", "FromNumpy(object): def __init__(self, hidden_size, emb_path, **kwargs): super(FromNumpy, self).__init__() self.hidden_size = hidden_size self.emb =", "numpy as np class Zero(object): def __init__(self, hidden_size, **kwargs): self.hidden_size = hidden_size def", "self.t1: emb = self.emb_1 self.t1 = True elif G.number_of_nodes() == self.emb_2.shape[0] and not", "hidden_size def train(self, G): return np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object): def __init__(self, hidden_size, emb_path,", "np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object): def __init__(self, hidden_size, emb_path, **kwargs): super(FromNumpy, self).__init__() self.hidden_size 
=", "return self.emb class FromNumpyAlign(object): def __init__(self, hidden_size, emb_path_1, emb_path_2, **kwargs): self.hidden_size = hidden_size", "self.t1, self.t2 = False, False def train(self, G): if G.number_of_nodes() == self.emb_1.shape[0] and", "self).__init__() self.hidden_size = hidden_size self.emb = np.load(emb_path) def train(self, G): id2node = dict([(vid,", "def __init__(self, hidden_size, emb_path, **kwargs): super(FromNumpy, self).__init__() self.hidden_size = hidden_size self.emb = np.load(emb_path)", "np class Zero(object): def __init__(self, hidden_size, **kwargs): self.hidden_size = hidden_size def train(self, G):", "G.number_of_nodes() == self.emb_2.shape[0] and not self.t2: emb = self.emb_2 self.t2 = True else:", "= self.emb_1 self.t1 = True elif G.number_of_nodes() == self.emb_2.shape[0] and not self.t2: emb", "= hidden_size self.emb = np.load(emb_path) def train(self, G): id2node = dict([(vid, node) for", "= hidden_size def train(self, G): return np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object): def __init__(self, hidden_size,", "import random import networkx as nx import numpy as np class Zero(object): def", "def __init__(self, hidden_size, **kwargs): self.hidden_size = hidden_size def train(self, G): return np.zeros((G.number_of_nodes(), self.hidden_size))", "range(len(id2node))]) assert G.number_of_nodes() == embeddings.shape[0] return embeddings class FromNumpyGraph(FromNumpy): def train(self, G): assert", "== self.emb_1.shape[0] and not self.t1: emb = self.emb_1 self.t1 = True elif G.number_of_nodes()", "node) for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]] for i in range(len(id2node))])", "def train(self, G): assert G is None return self.emb class FromNumpyAlign(object): def __init__(self,", "for i in range(len(id2node))]) assert G.number_of_nodes() == embeddings.shape[0] return embeddings class FromNumpyGraph(FromNumpy): def", "NotImplementedError id2node = dict([(vid, 
node) for vid, node in enumerate(G.nodes())]) embeddings = np.asarray([emb[id2node[i]]", "train(self, G): return np.zeros((G.number_of_nodes(), self.hidden_size)) class FromNumpy(object): def __init__(self, hidden_size, emb_path, **kwargs): super(FromNumpy,", "= True elif G.number_of_nodes() == self.emb_2.shape[0] and not self.t2: emb = self.emb_2 self.t2", "self.t2: emb = self.emb_2 self.t2 = True else: raise NotImplementedError id2node = dict([(vid,", "self.hidden_size)) class FromNumpy(object): def __init__(self, hidden_size, emb_path, **kwargs): super(FromNumpy, self).__init__() self.hidden_size = hidden_size", "enumerate(G.nodes())]) embeddings = np.asarray([self.emb[id2node[i]] for i in range(len(id2node))]) assert G.number_of_nodes() == embeddings.shape[0] return" ]
[ "# coding: utf-8 ''' Unit tests on the canvas framework. ''' from .", "canvas framework. ''' from . import exceptions, utils, json, views, controller_service, assets, \\", "Unit tests on the canvas framework. ''' from . import exceptions, utils, json,", "''' Unit tests on the canvas framework. ''' from . import exceptions, utils,", "coding: utf-8 ''' Unit tests on the canvas framework. ''' from . import", "tests on the canvas framework. ''' from . import exceptions, utils, json, views,", "on the canvas framework. ''' from . import exceptions, utils, json, views, controller_service,", "the canvas framework. ''' from . import exceptions, utils, json, views, controller_service, assets,", "framework. ''' from . import exceptions, utils, json, views, controller_service, assets, \\ model,", "''' from . import exceptions, utils, json, views, controller_service, assets, \\ model, service", "utf-8 ''' Unit tests on the canvas framework. ''' from . import exceptions," ]
[ "15, 21, 28, 36, 45, 55, ... # By converting each letter in", "function took {finish - start:.3f} seconds\") return result return wrapper @timeit def main():", "sum_of_letters = 0 for letter in word: sum_of_letters += ord(letter)-64 if sum_of_letters in", "finish = perf_counter() print(f\"{func.__name__} function took {finish - start:.3f} seconds\") return result return", "as f: words = f.readlines()[0][1:-1].split('\",\"') for word in words: sum_of_letters = 0 for", "its alphabetical position and adding these values we form a word value. For", "i in range(1, 27): triangle_num_values.append(int(i*(i+1)*0.5)) counter = 0 with open(\"p042_words.txt\", \"r\") as f:", "#!python3 # coding: utf-8 # The nth term of the sequence of triangle", "perf_counter() result = func(*args, **kwargs) finish = perf_counter() print(f\"{func.__name__} function took {finish -", "and adding these values we form a word value. For example, the word", "are triangle words? #https://projecteuler.net/problem=42 from time import perf_counter def timeit(func): def wrapper(*args, **kwargs):", "= func(*args, **kwargs) finish = perf_counter() print(f\"{func.__name__} function took {finish - start:.3f} seconds\")", "6, 10, 15, 21, 28, 36, 45, 55, ... # By converting each", "The nth term of the sequence of triangle numbers is given by, tn", "the word value is a triangle number then we shall call the word", "19 + 11 + 25 = 55 = t10. If the word value", "# The nth term of the sequence of triangle numbers is given by,", "in range(1, 27): triangle_num_values.append(int(i*(i+1)*0.5)) counter = 0 with open(\"p042_words.txt\", \"r\") as f: words", "adding these values we form a word value. 
For example, the word value", "letter in word: sum_of_letters += ord(letter)-64 if sum_of_letters in triangle_num_values: counter += 1", "open(\"p042_words.txt\", \"r\") as f: words = f.readlines()[0][1:-1].split('\",\"') for word in words: sum_of_letters =", "are: # 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...", "alphabetical position and adding these values we form a word value. For example,", "+ 11 + 25 = 55 = t10. If the word value is", "'Save Link/Target As...'), a 16K text file containing nearly two-thousand common English words,", "= 55 = t10. If the word value is a triangle number then", "if sum_of_letters in triangle_num_values: counter += 1 print(\"\\n\", counter) if __name__ == \"__main__\":", "triangle number then we shall call the word a triangle word. # Using", "in word: sum_of_letters += ord(letter)-64 if sum_of_letters in triangle_num_values: counter += 1 print(\"\\n\",", "a 16K text file containing nearly two-thousand common English words, how many are", "triangle numbers.py<gh_stars>0 #!python3 # coding: utf-8 # The nth term of the sequence", "sequence of triangle numbers is given by, tn = ½n(n+1); so the first", "words.txt (right click and 'Save Link/Target As...'), a 16K text file containing nearly", "counter = 0 with open(\"p042_words.txt\", \"r\") as f: words = f.readlines()[0][1:-1].split('\",\"') for word", "word value for SKY is 19 + 11 + 25 = 55 =", "is a triangle number then we shall call the word a triangle word.", "and 'Save Link/Target As...'), a 16K text file containing nearly two-thousand common English", "perf_counter def timeit(func): def wrapper(*args, **kwargs): start = perf_counter() result = func(*args, **kwargs)", "triangle_num_values.append(int(i*(i+1)*0.5)) counter = 0 with open(\"p042_words.txt\", \"r\") as f: words = f.readlines()[0][1:-1].split('\",\"') for", "letter in a word to a number corresponding to its alphabetical position and", "English words, how many are triangle words? 
#https://projecteuler.net/problem=42 from time import perf_counter def", "# coding: utf-8 # The nth term of the sequence of triangle numbers", "words: sum_of_letters = 0 for letter in word: sum_of_letters += ord(letter)-64 if sum_of_letters", "these values we form a word value. For example, the word value for", "the sequence of triangle numbers is given by, tn = ½n(n+1); so the", "range(1, 27): triangle_num_values.append(int(i*(i+1)*0.5)) counter = 0 with open(\"p042_words.txt\", \"r\") as f: words =", "in a word to a number corresponding to its alphabetical position and adding", "triangle numbers are: # 1, 3, 6, 10, 15, 21, 28, 36, 45,", "nearly two-thousand common English words, how many are triangle words? #https://projecteuler.net/problem=42 from time", "word value. For example, the word value for SKY is 19 + 11", "28, 36, 45, 55, ... # By converting each letter in a word", "word value is a triangle number then we shall call the word a", "number corresponding to its alphabetical position and adding these values we form a", "result = func(*args, **kwargs) finish = perf_counter() print(f\"{func.__name__} function took {finish - start:.3f}", "def main(): triangle_num_values = [] for i in range(1, 27): triangle_num_values.append(int(i*(i+1)*0.5)) counter =", "took {finish - start:.3f} seconds\") return result return wrapper @timeit def main(): triangle_num_values", "each letter in a word to a number corresponding to its alphabetical position", "term of the sequence of triangle numbers is given by, tn = ½n(n+1);", "the first ten triangle numbers are: # 1, 3, 6, 10, 15, 21,", "import perf_counter def timeit(func): def wrapper(*args, **kwargs): start = perf_counter() result = func(*args,", "text file containing nearly two-thousand common English words, how many are triangle words?", "to a number corresponding to its alphabetical position and adding these values we", "45, 55, ... 
# By converting each letter in a word to a", "wrapper(*args, **kwargs): start = perf_counter() result = func(*args, **kwargs) finish = perf_counter() print(f\"{func.__name__}", "t10. If the word value is a triangle number then we shall call", "file containing nearly two-thousand common English words, how many are triangle words? #https://projecteuler.net/problem=42", "for SKY is 19 + 11 + 25 = 55 = t10. If", "ord(letter)-64 if sum_of_letters in triangle_num_values: counter += 1 print(\"\\n\", counter) if __name__ ==", "word. # Using words.txt (right click and 'Save Link/Target As...'), a 16K text", "words, how many are triangle words? #https://projecteuler.net/problem=42 from time import perf_counter def timeit(func):", "two-thousand common English words, how many are triangle words? #https://projecteuler.net/problem=42 from time import", "<filename>files/042 - coded triangle numbers.py<gh_stars>0 #!python3 # coding: utf-8 # The nth term", "25 = 55 = t10. If the word value is a triangle number", "As...'), a 16K text file containing nearly two-thousand common English words, how many", "click and 'Save Link/Target As...'), a 16K text file containing nearly two-thousand common", "start:.3f} seconds\") return result return wrapper @timeit def main(): triangle_num_values = [] for", "= f.readlines()[0][1:-1].split('\",\"') for word in words: sum_of_letters = 0 for letter in word:", "a triangle word. # Using words.txt (right click and 'Save Link/Target As...'), a", "For example, the word value for SKY is 19 + 11 + 25", "3, 6, 10, 15, 21, 28, 36, 45, 55, ... # By converting", "then we shall call the word a triangle word. # Using words.txt (right", "36, 45, 55, ... # By converting each letter in a word to", "def timeit(func): def wrapper(*args, **kwargs): start = perf_counter() result = func(*args, **kwargs) finish", "so the first ten triangle numbers are: # 1, 3, 6, 10, 15,", "= t10. 
If the word value is a triangle number then we shall", "numbers.py<gh_stars>0 #!python3 # coding: utf-8 # The nth term of the sequence of", "11 + 25 = 55 = t10. If the word value is a", "func(*args, **kwargs) finish = perf_counter() print(f\"{func.__name__} function took {finish - start:.3f} seconds\") return", "value for SKY is 19 + 11 + 25 = 55 = t10.", "how many are triangle words? #https://projecteuler.net/problem=42 from time import perf_counter def timeit(func): def", "21, 28, 36, 45, 55, ... # By converting each letter in a", "[] for i in range(1, 27): triangle_num_values.append(int(i*(i+1)*0.5)) counter = 0 with open(\"p042_words.txt\", \"r\")", "+= ord(letter)-64 if sum_of_letters in triangle_num_values: counter += 1 print(\"\\n\", counter) if __name__", "By converting each letter in a word to a number corresponding to its", "triangle word. # Using words.txt (right click and 'Save Link/Target As...'), a 16K", "shall call the word a triangle word. # Using words.txt (right click and", "is given by, tn = ½n(n+1); so the first ten triangle numbers are:", "start = perf_counter() result = func(*args, **kwargs) finish = perf_counter() print(f\"{func.__name__} function took", "many are triangle words? 
def timeit(func):
    """Decorator that reports how long the wrapped callable took to run.

    Args:
        func: The callable being timed.

    Returns:
        A wrapper that forwards all arguments, prints the elapsed
        wall-clock time, and returns *func*'s result unchanged.
    """
    from functools import wraps

    @wraps(func)  # keep the wrapped function's __name__ / __doc__ intact
    def wrapper(*args, **kwargs):
        start = perf_counter()
        result = func(*args, **kwargs)
        finish = perf_counter()
        print(f"{func.__name__} function took {finish - start:.3f} seconds")
        return result
    return wrapper


@timeit
def main():
    """Count the triangle words in p042_words.txt, print and return the count.

    A word's value is the sum of its letters' alphabetical positions
    (A=1 ... Z=26); a word is a "triangle word" when that value is a
    triangle number t_n = n(n+1)/2.

    Returns:
        int: number of triangle words found (also printed, as before).
    """
    # Integer arithmetic avoids float rounding (the original used
    # int(i*(i+1)*0.5)), and a set gives O(1) membership tests.
    # NOTE(review): keeps the original bound of t_26 = 351; a word whose
    # value exceeded that would be missed — same behavior as before.
    triangle_num_values = {n * (n + 1) // 2 for n in range(1, 27)}

    with open("p042_words.txt", "r") as f:
        # The file is a single line of comma-separated, double-quoted words.
        words = f.readlines()[0][1:-1].split('","')

    counter = 0
    for word in words:
        # ord(letter) - 64 maps uppercase ASCII 'A'..'Z' to 1..26.
        word_value = sum(ord(letter) - 64 for letter in word)
        if word_value in triangle_num_values:
            counter += 1

    print("\n", counter)
    return counter  # returned as well so callers/tests can use the result


if __name__ == "__main__":
    main()
# Error codes shared by Nucleus request handlers. Each value is either a
# ready-made (code, message) tuple or a callable that interpolates a
# context value into the message and returns such a tuple.
ERROR = {
    "MISSING_MESSAGE_TYPE": (1, "No message type found."),
    "MISSING_PAYLOAD": (2, "No data payload found."),
    # Bugfix: the template was missing its "{}" placeholder, so the
    # object name was silently dropped from the message.
    "OBJECT_NOT_FOUND": lambda name: (3, "Object does not exist: {}".format(name)),
    "MISSING_KEY": lambda name: (4, "Missing data for this request: {}".format(name)),
    "INVALID_SIGNATURE": (5, "Invalid signature."),
    "INVALID_SESSION": (6, "Session invalid. Please re-authenticate."),
    # "id_" avoids shadowing the builtin id(); callers pass positionally.
    "DUPLICATE_ID": lambda id_: (7, "Duplicate ID: {}".format(id_)),
    "SOUMA_NOT_FOUND": lambda id_: (8, "Souma not found: {}".format(id_)),
    "MISSING_PARAMETER": lambda name: (9, "Missing HTTP parameter: {}".format(name)),
}
class InvalidSignatureError(Exception):
    """Raised when a signature fails authenticity checks."""


class PersonaNotFoundError(Exception):
    """Raised when the Persona profile specified for an action is not available."""


class UnauthorizedError(Exception):
    """Raised when the active Persona is not authorized for an action."""


class VesicleStateError(Exception):
    """Raised when a Vesicle's state does not allow for an action."""
def create_session():
    """Return a session to be used for database connections.

    Returns:
        Session: SQLAlchemy session object
    """
    # Handing out _Session() directly produced integrity errors, so we
    # return the request-scoped session that Flask-SQLAlchemy manages
    # (db.session) instead.
    return db.session
def source_format(address):
    """Render a peer address as "host:port" for display/logging.

    Replaces the original lambda-assigned-to-a-name (discouraged by
    PEP 8, E731) with a def; behavior is unchanged.

    Args:
        address: A (host, port) pair, or None.

    Returns:
        "host:port" as a string, or None when no address was given.
    """
    if address is None:
        return None
    return "{host}:{port}".format(host=address[0], port=address[1])
# return _Session() # db.session is managed by Flask-SQLAlchemy and bound to", "(2, \"updating\") } # Possible states of 1ups ONEUP_STATES = { -1: \"disabled\",", "HTTP parameter: {}\".format(name)), } # Setup Blinker namespace notification_signals = blinker.Namespace() # Setup", "\"Missing data for this request: {}\".format(name)), \"INVALID_SIGNATURE\": (5, \"Invalid signature.\"), \"INVALID_SESSION\": (6, \"Session", "id: (7, \"Duplicate ID: {}\".format(id)), \"SOUMA_NOT_FOUND\": lambda id: (8, \"Souma not found: {}\".format(id)),", "sessionmaker that returns a session prefconfigured with the # model bindings from Nucleus", "of 1ups ONEUP_STATES = { -1: \"disabled\", 0: \"active\", 1: \"unknown author\" }", "name: (9, \"Missing HTTP parameter: {}\".format(name)), } # Setup Blinker namespace notification_signals =", "# Possible states of stars STAR_STATES = { -2: (-2, \"deleted\"), -1: (-1,", "\"published\"), 1: (1, \"draft\"), 2: (2, \"private\"), 3: (3, \"updating\") } # Possible", "\"Missing HTTP parameter: {}\".format(name)), } # Setup Blinker namespace notification_signals = blinker.Namespace() #", "logging.getLogger('nucleus') # Source formatting helper source_format = lambda address: None if address is", "\"\"\" # Produces integrity errors! # return _Session() # db.session is managed by", "\"INVALID_SIGNATURE\": (5, \"Invalid signature.\"), \"INVALID_SESSION\": (6, \"Session invalid. 
Please re-authenticate.\"), \"DUPLICATE_ID\": lambda id:", "for an action\"\"\" pass # Import at bottom to avoid circular imports #", "parameter: {}\".format(name)), } # Setup Blinker namespace notification_signals = blinker.Namespace() # Setup logger", "\"Object does not exist: \".format(name)), \"MISSING_KEY\": lambda name: (4, \"Missing data for this", "\"private\"), 2: (2, \"updating\") } # Possible states of 1ups ONEUP_STATES = {", "signature fails authenticity checks\"\"\" pass class PersonaNotFoundError(Exception): \"\"\"Throw this error when the Persona", "1: (1, \"private\"), 2: (2, \"updating\") } # Possible states of 1ups ONEUP_STATES", "re-authenticate.\"), \"DUPLICATE_ID\": lambda id: (7, \"Duplicate ID: {}\".format(id)), \"SOUMA_NOT_FOUND\": lambda id: (8, \"Souma", "# Source formatting helper source_format = lambda address: None if address is None", "found: {}\".format(id)), \"MISSING_PARAMETER\": lambda name: (9, \"Missing HTTP parameter: {}\".format(name)), } # Setup", "querying db binds from nucleus.models import * from vesicle import Vesicle # _Session", "binds from nucleus.models import * from vesicle import Vesicle # _Session is a", "request: {}\".format(name)), \"INVALID_SIGNATURE\": (5, \"Invalid signature.\"), \"INVALID_SESSION\": (6, \"Session invalid. 
Please re-authenticate.\"), \"DUPLICATE_ID\":", "\"published\"), 1: (1, \"private\"), 2: (2, \"updating\") } # Possible states of 1ups", "pass class VesicleStateError(Exception): \"\"\"Throw this error when a Vesicle's state does not allow", "\"disabled\", 0: \"active\", 1: \"unknown author\" } CHANGE_TYPES = (\"insert\", \"update\", \"delete\") class", "is not available\"\"\" pass class UnauthorizedError(Exception): \"\"\"Throw this error when the active Persona", "\"Duplicate ID: {}\".format(id)), \"SOUMA_NOT_FOUND\": lambda id: (8, \"Souma not found: {}\".format(id)), \"MISSING_PARAMETER\": lambda", "0: (0, \"published\"), 1: (1, \"draft\"), 2: (2, \"private\"), 3: (3, \"updating\") }", "allow for an action\"\"\" pass # Import at bottom to avoid circular imports", "from sqlalchemy.orm import sessionmaker ERROR = { \"MISSING_MESSAGE_TYPE\": (1, \"No message type found.\"),", "SQLAlchemy session object \"\"\" # Produces integrity errors! # return _Session() # db.session", "found.\"), \"MISSING_PAYLOAD\": (2, \"No data payload found.\"), \"OBJECT_NOT_FOUND\": lambda name: (3, \"Object does", "blinker.Namespace() # Setup logger namespace logger = logging.getLogger('nucleus') # Source formatting helper source_format", "this error when the active Persona is not authorized for an action\"\"\" pass", "at bottom to avoid circular imports # Import all models to allow querying", "(6, \"Session invalid. 
Please re-authenticate.\"), \"DUPLICATE_ID\": lambda id: (7, \"Duplicate ID: {}\".format(id)), \"SOUMA_NOT_FOUND\":", "(3, \"Object does not exist: \".format(name)), \"MISSING_KEY\": lambda name: (4, \"Missing data for", "Setup Blinker namespace notification_signals = blinker.Namespace() # Setup logger namespace logger = logging.getLogger('nucleus')", "= logging.getLogger('nucleus') # Source formatting helper source_format = lambda address: None if address", "planets PLANET_STATES = { -1: (-1, \"unavailable\"), 0: (0, \"published\"), 1: (1, \"private\"),", "} # Possible states of 1ups ONEUP_STATES = { -1: \"disabled\", 0: \"active\",", "name: (3, \"Object does not exist: \".format(name)), \"MISSING_KEY\": lambda name: (4, \"Missing data", "stars STAR_STATES = { -2: (-2, \"deleted\"), -1: (-1, \"unavailable\"), 0: (0, \"published\"),", "* from vesicle import Vesicle # _Session is a custom sessionmaker that returns", "fails authenticity checks\"\"\" pass class PersonaNotFoundError(Exception): \"\"\"Throw this error when the Persona profile", "an action\"\"\" pass # Import at bottom to avoid circular imports # Import", "\"private\"), 3: (3, \"updating\") } # Possible states of planets PLANET_STATES = {", "this error when a signature fails authenticity checks\"\"\" pass class PersonaNotFoundError(Exception): \"\"\"Throw this", "InvalidSignatureError(Exception): \"\"\"Throw this error when a signature fails authenticity checks\"\"\" pass class PersonaNotFoundError(Exception):", "model bindings from Nucleus _Session = sessionmaker(bind=db.get_engine(app)) def create_session(): \"\"\"Return a session to", "-1: (-1, \"unavailable\"), 0: (0, \"published\"), 1: (1, \"private\"), 2: (2, \"updating\") }", "available\"\"\" pass class UnauthorizedError(Exception): \"\"\"Throw this error when the active Persona is not", "{}\".format(id)), \"MISSING_PARAMETER\": lambda name: (9, \"Missing HTTP parameter: {}\".format(name)), } # Setup Blinker", "(8, \"Souma not found: 
# Import at bottom to avoid circular imports.
# Import all models so db binds can be queried through this package.
from nucleus.models import *
from vesicle import Vesicle

# _Session is a custom sessionmaker that returns a session preconfigured
# with the model bindings from Nucleus. NOTE(review): create_session()
# does not use it (instantiating it produced integrity errors per the
# comment there) — confirm it is still needed for direct callers.
_Session = sessionmaker(bind=db.get_engine(app))
Please re-authenticate.\"), \"DUPLICATE_ID\": lambda id: (7, \"Duplicate ID: {}\".format(id)), \"SOUMA_NOT_FOUND\": lambda id:", "custom sessionmaker that returns a session prefconfigured with the # model bindings from", "lambda name: (9, \"Missing HTTP parameter: {}\".format(name)), } # Setup Blinker namespace notification_signals", "\".format(name)), \"MISSING_KEY\": lambda name: (4, \"Missing data for this request: {}\".format(name)), \"INVALID_SIGNATURE\": (5,", "Session: SQLAlchemy session object \"\"\" # Produces integrity errors! # return _Session() #", "address: None if address is None else \\ \"{host}:{port}\".format(host=address[0], port=address[1]) # Possible states", "STAR_STATES = { -2: (-2, \"deleted\"), -1: (-1, \"unavailable\"), 0: (0, \"published\"), 1:", "1: (1, \"draft\"), 2: (2, \"private\"), 3: (3, \"updating\") } # Possible states", "formatting helper source_format = lambda address: None if address is None else \\", "import Vesicle # _Session is a custom sessionmaker that returns a session prefconfigured", "a signature fails authenticity checks\"\"\" pass class PersonaNotFoundError(Exception): \"\"\"Throw this error when the", "source_format = lambda address: None if address is None else \\ \"{host}:{port}\".format(host=address[0], port=address[1])", "Setup logger namespace logger = logging.getLogger('nucleus') # Source formatting helper source_format = lambda", "\"{host}:{port}\".format(host=address[0], port=address[1]) # Possible states of stars STAR_STATES = { -2: (-2, \"deleted\"),", "id: (8, \"Souma not found: {}\".format(id)), \"MISSING_PARAMETER\": lambda name: (9, \"Missing HTTP parameter:", "connections Returns: Session: SQLAlchemy session object \"\"\" # Produces integrity errors! 
# return", "\"MISSING_PAYLOAD\": (2, \"No data payload found.\"), \"OBJECT_NOT_FOUND\": lambda name: (3, \"Object does not", "state does not allow for an action\"\"\" pass # Import at bottom to", "for an action is not available\"\"\" pass class UnauthorizedError(Exception): \"\"\"Throw this error when", "from vesicle import Vesicle # _Session is a custom sessionmaker that returns a", "return _Session() # db.session is managed by Flask-SQLAlchemy and bound to a request", "db, app from sqlalchemy.orm import sessionmaker ERROR = { \"MISSING_MESSAGE_TYPE\": (1, \"No message", "Please re-authenticate.\"), \"DUPLICATE_ID\": lambda id: (7, \"Duplicate ID: {}\".format(id)), \"SOUMA_NOT_FOUND\": lambda id: (8,", "sessionmaker(bind=db.get_engine(app)) def create_session(): \"\"\"Return a session to be used for database connections Returns:", "from web_ui import db, app from sqlalchemy.orm import sessionmaker ERROR = { \"MISSING_MESSAGE_TYPE\":", "checks\"\"\" pass class PersonaNotFoundError(Exception): \"\"\"Throw this error when the Persona profile specified for", "lambda name: (4, \"Missing data for this request: {}\".format(name)), \"INVALID_SIGNATURE\": (5, \"Invalid signature.\"),", "blinker from web_ui import db, app from sqlalchemy.orm import sessionmaker ERROR = {", "\"SOUMA_NOT_FOUND\": lambda id: (8, \"Souma not found: {}\".format(id)), \"MISSING_PARAMETER\": lambda name: (9, \"Missing", "for database connections Returns: Session: SQLAlchemy session object \"\"\" # Produces integrity errors!", "profile specified for an action is not available\"\"\" pass class UnauthorizedError(Exception): \"\"\"Throw this", "\"deleted\"), -1: (-1, \"unavailable\"), 0: (0, \"published\"), 1: (1, \"draft\"), 2: (2, \"private\"),", "avoid circular imports # Import all models to allow querying db binds from", "returns a session prefconfigured with the # model bindings from Nucleus _Session =", "with the # model bindings from Nucleus _Session = sessionmaker(bind=db.get_engine(app)) def 
create_session(): \"\"\"Return", "to avoid circular imports # Import all models to allow querying db binds", "class InvalidSignatureError(Exception): \"\"\"Throw this error when a signature fails authenticity checks\"\"\" pass class", "a session to be used for database connections Returns: Session: SQLAlchemy session object", "create_session(): \"\"\"Return a session to be used for database connections Returns: Session: SQLAlchemy", "2: (2, \"updating\") } # Possible states of 1ups ONEUP_STATES = { -1:", "3: (3, \"updating\") } # Possible states of planets PLANET_STATES = { -1:", "(-2, \"deleted\"), -1: (-1, \"unavailable\"), 0: (0, \"published\"), 1: (1, \"draft\"), 2: (2,", "circular imports # Import all models to allow querying db binds from nucleus.models", "-1: (-1, \"unavailable\"), 0: (0, \"published\"), 1: (1, \"draft\"), 2: (2, \"private\"), 3:", "(9, \"Missing HTTP parameter: {}\".format(name)), } # Setup Blinker namespace notification_signals = blinker.Namespace()", "None else \\ \"{host}:{port}\".format(host=address[0], port=address[1]) # Possible states of stars STAR_STATES = {", "be used for database connections Returns: Session: SQLAlchemy session object \"\"\" # Produces", "pass # Import at bottom to avoid circular imports # Import all models", "1ups ONEUP_STATES = { -1: \"disabled\", 0: \"active\", 1: \"unknown author\" } CHANGE_TYPES", "lambda id: (7, \"Duplicate ID: {}\".format(id)), \"SOUMA_NOT_FOUND\": lambda id: (8, \"Souma not found:", "VesicleStateError(Exception): \"\"\"Throw this error when a Vesicle's state does not allow for an", "authorized for an action\"\"\" pass class VesicleStateError(Exception): \"\"\"Throw this error when a Vesicle's", "{ \"MISSING_MESSAGE_TYPE\": (1, \"No message type found.\"), \"MISSING_PAYLOAD\": (2, \"No data payload found.\"),", "the active Persona is not authorized for an action\"\"\" pass class VesicleStateError(Exception): \"\"\"Throw", "else \\ \"{host}:{port}\".format(host=address[0], port=address[1]) # 
Possible states of stars STAR_STATES = { -2:", "-1: \"disabled\", 0: \"active\", 1: \"unknown author\" } CHANGE_TYPES = (\"insert\", \"update\", \"delete\")" ]
[ "If seen, exit early. seen = {} for box_id in input: for i", "parser.add_argument( '-f', default='input.txt', dest='input_file', ) args = parser.parse_known_args()[0] input_file = args.input_file with open(input_file)", "Track all strings with each letter missing. # If seen, exit early. seen", "open(input_file) as f: input = [l.strip() for l in f.readlines()] print(part2(input)) if __name__", "= box_id[:i] + box_id[i+1:] if s in seen[i]: print(s) seen[i].add(s) def main(): parser", "= args.input_file with open(input_file) as f: input = [l.strip() for l in f.readlines()]", "with open(input_file) as f: input = [l.strip() for l in f.readlines()] print(part2(input)) if", "letter missing. # If seen, exit early. seen = {} for box_id in", "# Track all strings with each letter missing. # If seen, exit early.", "seen[i]: print(s) seen[i].add(s) def main(): parser = argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt', dest='input_file', )", "s = box_id[:i] + box_id[i+1:] if s in seen[i]: print(s) seen[i].add(s) def main():", "for i in range(len(box_id) - 1): if i not in seen: seen[i] =", "exit early. seen = {} for box_id in input: for i in range(len(box_id)", "box_id in input: for i in range(len(box_id) - 1): if i not in", "for box_id in input: for i in range(len(box_id) - 1): if i not", "in input: for i in range(len(box_id) - 1): if i not in seen:", "argparse def part2(input): # Track all strings with each letter missing. 
# If", "#!/usr/bin/env python import argparse def part2(input): # Track all strings with each letter", "as f: input = [l.strip() for l in f.readlines()] print(part2(input)) if __name__ ==", "range(len(box_id) - 1): if i not in seen: seen[i] = set() s =", "f: input = [l.strip() for l in f.readlines()] print(part2(input)) if __name__ == '__main__':", "in range(len(box_id) - 1): if i not in seen: seen[i] = set() s", "if s in seen[i]: print(s) seen[i].add(s) def main(): parser = argparse.ArgumentParser() parser.add_argument( '-f',", "in seen[i]: print(s) seen[i].add(s) def main(): parser = argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt', dest='input_file',", "parser = argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt', dest='input_file', ) args = parser.parse_known_args()[0] input_file =", "box_id[:i] + box_id[i+1:] if s in seen[i]: print(s) seen[i].add(s) def main(): parser =", "box_id[i+1:] if s in seen[i]: print(s) seen[i].add(s) def main(): parser = argparse.ArgumentParser() parser.add_argument(", "args.input_file with open(input_file) as f: input = [l.strip() for l in f.readlines()] print(part2(input))", ") args = parser.parse_known_args()[0] input_file = args.input_file with open(input_file) as f: input =", "<reponame>MartinPetkov/AdventOfCode #!/usr/bin/env python import argparse def part2(input): # Track all strings with each", "seen: seen[i] = set() s = box_id[:i] + box_id[i+1:] if s in seen[i]:", "seen, exit early. seen = {} for box_id in input: for i in", "set() s = box_id[:i] + box_id[i+1:] if s in seen[i]: print(s) seen[i].add(s) def", "input = [l.strip() for l in f.readlines()] print(part2(input)) if __name__ == '__main__': main()", "# If seen, exit early. 
seen = {} for box_id in input: for", "print(s) seen[i].add(s) def main(): parser = argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt', dest='input_file', ) args", "= set() s = box_id[:i] + box_id[i+1:] if s in seen[i]: print(s) seen[i].add(s)", "s in seen[i]: print(s) seen[i].add(s) def main(): parser = argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt',", "default='input.txt', dest='input_file', ) args = parser.parse_known_args()[0] input_file = args.input_file with open(input_file) as f:", "missing. # If seen, exit early. seen = {} for box_id in input:", "early. seen = {} for box_id in input: for i in range(len(box_id) -", "import argparse def part2(input): # Track all strings with each letter missing. #", "= argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt', dest='input_file', ) args = parser.parse_known_args()[0] input_file = args.input_file", "'-f', default='input.txt', dest='input_file', ) args = parser.parse_known_args()[0] input_file = args.input_file with open(input_file) as", "= parser.parse_known_args()[0] input_file = args.input_file with open(input_file) as f: input = [l.strip() for", "i not in seen: seen[i] = set() s = box_id[:i] + box_id[i+1:] if", "args = parser.parse_known_args()[0] input_file = args.input_file with open(input_file) as f: input = [l.strip()", "part2(input): # Track all strings with each letter missing. # If seen, exit", "strings with each letter missing. # If seen, exit early. seen = {}", "input_file = args.input_file with open(input_file) as f: input = [l.strip() for l in", "main(): parser = argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt', dest='input_file', ) args = parser.parse_known_args()[0] input_file", "{} for box_id in input: for i in range(len(box_id) - 1): if i", "python import argparse def part2(input): # Track all strings with each letter missing.", "all strings with each letter missing. # If seen, exit early. 
seen =", "if i not in seen: seen[i] = set() s = box_id[:i] + box_id[i+1:]", "in seen: seen[i] = set() s = box_id[:i] + box_id[i+1:] if s in", "argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt', dest='input_file', ) args = parser.parse_known_args()[0] input_file = args.input_file with", "def part2(input): # Track all strings with each letter missing. # If seen,", "input: for i in range(len(box_id) - 1): if i not in seen: seen[i]", "seen = {} for box_id in input: for i in range(len(box_id) - 1):", "seen[i].add(s) def main(): parser = argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt', dest='input_file', ) args =", "seen[i] = set() s = box_id[:i] + box_id[i+1:] if s in seen[i]: print(s)", "- 1): if i not in seen: seen[i] = set() s = box_id[:i]", "+ box_id[i+1:] if s in seen[i]: print(s) seen[i].add(s) def main(): parser = argparse.ArgumentParser()", "= {} for box_id in input: for i in range(len(box_id) - 1): if", "parser.parse_known_args()[0] input_file = args.input_file with open(input_file) as f: input = [l.strip() for l", "def main(): parser = argparse.ArgumentParser() parser.add_argument( '-f', default='input.txt', dest='input_file', ) args = parser.parse_known_args()[0]", "with each letter missing. # If seen, exit early. seen = {} for", "i in range(len(box_id) - 1): if i not in seen: seen[i] = set()", "not in seen: seen[i] = set() s = box_id[:i] + box_id[i+1:] if s", "each letter missing. # If seen, exit early. seen = {} for box_id", "1): if i not in seen: seen[i] = set() s = box_id[:i] +", "dest='input_file', ) args = parser.parse_known_args()[0] input_file = args.input_file with open(input_file) as f: input" ]
[ "#mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the plot of real data we took from the paper", "errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine = np.array(sin_symmetric) error =", "symmetric symmetric summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this is the fitting symmetric summation plot", "sum(symmetric) #Total of Symmetric Summation stDni = st.stdev(symmetric) #Standard Deviation of Symmetric Summation", "of line fit dNi = sum(symmetric) #Total of Symmetric Summation stDni = st.stdev(symmetric)", "of Symmetric Summation mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the plot", "the plot of real data we took from the paper #mt.bar(datax_symmetric,symmetric,width=19,align='center',yerr=errorbars_symmetric,color=\"green\") #this is", "plot of real data we took from the paper #mt.bar(datax_symmetric,symmetric,width=19,align='center',yerr=errorbars_symmetric,color=\"green\") #this is the", "real data we took from the paper #mt.bar(datax_symmetric,symmetric,width=19,align='center',yerr=errorbars_symmetric,color=\"green\") #this is the data for", "= [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric = [] errorbars =", "st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric = []", "= st.stdev(symmetric) #Standard Deviation of Symmetric Summation mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\")", "import 
linregress import matplotlib.pyplot as mt import math import numpy as np import", "datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric = [] errorbars = [] symmetric = [] errorbars_symmetric", "bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the error bar on linefit of sinus mt.legend(\"Error bars\")", "symmetric summation plot mt.show() chi_squared = np.sum((np.polyval(fit, sine) - dNarr) ** 2) #Chi", "dNi = sum(symmetric) #Total of Symmetric Summation stDni = st.stdev(symmetric) #Standard Deviation of", "bar on linefit of sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\") #this is the points", "= [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric = [] errorbars = [] symmetric =", "np import statistics as st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric =", "fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the error bar on linefit of sinus mt.legend(\"Error", "import numpy as np import statistics as st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data =", "mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this is the fitting symmetric summation plot mt.show() chi_squared =", "symmetric = [] errorbars_symmetric = [] for i in range(len(data)): errorbars.append(math.sqrt(data[i])) for k", "np.array(sin_symmetric) error = np.array(errorbars_symmetric) dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is", "Deviation of Symmetric Summation mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\") 
mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the", "st.stdev(symmetric) #Standard Deviation of Symmetric Summation mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this", "#Standard Deviation of Symmetric Summation mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is", "[20,40,60,80,100,120,140,160,180] sin_symmetric = [] errorbars = [] symmetric = [] errorbars_symmetric = []", "i in range(len(data)): errorbars.append(math.sqrt(data[i])) for k in range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for", "[] for i in range(len(data)): errorbars.append(math.sqrt(data[i])) for k in range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2)", "for l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine = np.array(sin_symmetric) error = np.array(errorbars_symmetric)", "symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine = np.array(sin_symmetric) error", "points for symmetric symmetric summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this is the fitting symmetric", "#this is the points for symmetric symmetric summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this is", "scipy.stats import linregress import matplotlib.pyplot as mt import math import numpy as np", "dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) 
fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the error bar on", "import matplotlib.pyplot as mt import math import numpy as np import statistics as", "we took from the paper #mt.bar(datax_symmetric,symmetric,width=19,align='center',yerr=errorbars_symmetric,color=\"green\") #this is the data for symmetric summation", "2))) sine = np.array(sin_symmetric) error = np.array(errorbars_symmetric) dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit)", "dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the plot of real data we took", "#Total of Symmetric Summation stDni = st.stdev(symmetric) #Standard Deviation of Symmetric Summation mt.title(\"Theta", "errorbars.append(math.sqrt(data[i])) for k in range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in range(len(symmetric)):", "fit dNi = sum(symmetric) #Total of Symmetric Summation stDni = st.stdev(symmetric) #Standard Deviation", "import statistics as st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180]", "#Chi of line fit dNi = sum(symmetric) #Total of Symmetric Summation stDni =", "[] errorbars = [] symmetric = [] errorbars_symmetric = [] for i in", "sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\") #this is the points for symmetric symmetric summation", "#this is the error bar on linefit of sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\")", "datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = 
[43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric = [] errorbars", "range(len(data)): errorbars.append(math.sqrt(data[i])) for k in range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in", "= [] for i in range(len(data)): errorbars.append(math.sqrt(data[i])) for k in range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k])))", "data we took from the paper #mt.bar(datax_symmetric,symmetric,width=19,align='center',yerr=errorbars_symmetric,color=\"green\") #this is the data for symmetric", "[] symmetric = [] errorbars_symmetric = [] for i in range(len(data)): errorbars.append(math.sqrt(data[i])) for", "stDni = st.stdev(symmetric) #Standard Deviation of Symmetric Summation mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\")", "in range(len(data)): errorbars.append(math.sqrt(data[i])) for k in range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l", "summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this is the fitting symmetric summation plot mt.show() chi_squared", "the error bar on linefit of sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\") #this is", "of sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\") #this is the points for symmetric symmetric", "numpy as np import statistics as st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16]", "mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the plot 
of real data", "dNarr) ** 2) #Chi of line fit dNi = sum(symmetric) #Total of Symmetric", "/ 2))) sine = np.array(sin_symmetric) error = np.array(errorbars_symmetric) dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1)", "mt import math import numpy as np import statistics as st datax =", "line fit dNi = sum(symmetric) #Total of Symmetric Summation stDni = st.stdev(symmetric) #Standard", "error = np.array(errorbars_symmetric) dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the", "bars\") mt.scatter(sine,dNarr, color=\"red\") #this is the points for symmetric symmetric summation mt.legend(\"Sinus function\")", "2) #Chi of line fit dNi = sum(symmetric) #Total of Symmetric Summation stDni", "mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the plot of real data we took from the", "matplotlib.pyplot as mt import math import numpy as np import statistics as st", "vs dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the plot of real data we", "errorbars_symmetric = [] for i in range(len(data)): errorbars.append(math.sqrt(data[i])) for k in range(8): symmetric.append(data[k]+data[16-k])", "the fitting symmetric summation plot mt.show() chi_squared = np.sum((np.polyval(fit, sine) - dNarr) **", "for k in range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l]", "sine) - dNarr) ** 2) #Chi of line fit dNi = sum(symmetric) #Total", "k in range(8): symmetric.append(data[k]+data[16-k]) 
errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] /", "chi_squared = np.sum((np.polyval(fit, sine) - dNarr) ** 2) #Chi of line fit dNi", "[43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric = [] errorbars = [] symmetric = []", "= np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the error bar on linefit", "the points for symmetric symmetric summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this is the fitting", "mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the plot of real data we took from", "data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric = [] errorbars = [] symmetric", "function\") mt.plot(sine,bfl(sine),color=\"green\") #this is the fitting symmetric summation plot mt.show() chi_squared = np.sum((np.polyval(fit,", "[20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric = [] errorbars = []", "<reponame>cangokceaslanx/2D-Scattering from scipy.stats import linregress import matplotlib.pyplot as mt import math import numpy", "= np.array(sin_symmetric) error = np.array(errorbars_symmetric) dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this", "Symmetric 
Summation mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the plot of", "plot mt.show() chi_squared = np.sum((np.polyval(fit, sine) - dNarr) ** 2) #Chi of line", "is the fitting symmetric summation plot mt.show() chi_squared = np.sum((np.polyval(fit, sine) - dNarr)", "summation plot mt.show() chi_squared = np.sum((np.polyval(fit, sine) - dNarr) ** 2) #Chi of", "mt.show() chi_squared = np.sum((np.polyval(fit, sine) - dNarr) ** 2) #Chi of line fit", "mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\") #this is the points for symmetric symmetric summation mt.legend(\"Sinus", "of real data we took from the paper #mt.bar(datax_symmetric,symmetric,width=19,align='center',yerr=errorbars_symmetric,color=\"green\") #this is the data", "= np.array(errorbars_symmetric) dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the error", "color=\"red\") #this is the points for symmetric symmetric summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this", "np.array(errorbars_symmetric) dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the error bar", "linefit of sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\") #this is the points for symmetric", "symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine =", "- dNarr) ** 2) #Chi of line fit dNi = 
sum(symmetric) #Total of", "#this is the fitting symmetric summation plot mt.show() chi_squared = np.sum((np.polyval(fit, sine) -", "linregress import matplotlib.pyplot as mt import math import numpy as np import statistics", "range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine", "= [] symmetric = [] errorbars_symmetric = [] for i in range(len(data)): errorbars.append(math.sqrt(data[i]))", "Summation mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\") mt.ylabel(\"dN\") #mt.bar(datax,data,width=19,align='center',yerr=errorbars,color=\"gray\") #this is the plot of real", "mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the error bar on linefit of sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr,", "in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine = np.array(sin_symmetric) error = np.array(errorbars_symmetric) dNarr =", "= [] errorbars_symmetric = [] for i in range(len(data)): errorbars.append(math.sqrt(data[i])) for k in", "[] errorbars_symmetric = [] for i in range(len(data)): errorbars.append(math.sqrt(data[i])) for k in range(8):", "range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine = np.array(sin_symmetric) error = np.array(errorbars_symmetric) dNarr = np.array(symmetric)", "#this is the plot of real data we took from the paper #mt.bar(datax_symmetric,symmetric,width=19,align='center',yerr=errorbars_symmetric,color=\"green\")", "errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine = 
np.array(sin_symmetric)", "Symmetric Summation stDni = st.stdev(symmetric) #Standard Deviation of Symmetric Summation mt.title(\"Theta vs dN\")", "is the error bar on linefit of sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\") #this", "in range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8]))) for l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2)))", "np.sum((np.polyval(fit, sine) - dNarr) ** 2) #Chi of line fit dNi = sum(symmetric)", "as mt import math import numpy as np import statistics as st datax", "Summation stDni = st.stdev(symmetric) #Standard Deviation of Symmetric Summation mt.title(\"Theta vs dN\") mt.xlabel(\"Theta\")", "np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the error bar on linefit of", "slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\") #this is the error bar on linefit of sinus", "import math import numpy as np import statistics as st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340]", "= [20,40,60,80,100,120,140,160,180] sin_symmetric = [] errorbars = [] symmetric = [] errorbars_symmetric =", "error bar on linefit of sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\") #this is the", "errorbars = [] symmetric = [] errorbars_symmetric = [] for i in range(len(data)):", "as st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric =", "is the plot of real 
data we took from the paper #mt.bar(datax_symmetric,symmetric,width=19,align='center',yerr=errorbars_symmetric,color=\"green\") #this", "sin_symmetric = [] errorbars = [] symmetric = [] errorbars_symmetric = [] for", "= [] errorbars = [] symmetric = [] errorbars_symmetric = [] for i", "on linefit of sinus mt.legend(\"Error bars\") mt.scatter(sine,dNarr, color=\"red\") #this is the points for", "of Symmetric Summation stDni = st.stdev(symmetric) #Standard Deviation of Symmetric Summation mt.title(\"Theta vs", "statistics as st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric = [20,40,60,80,100,120,140,160,180] sin_symmetric", "from scipy.stats import linregress import matplotlib.pyplot as mt import math import numpy as", "as np import statistics as st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data = [43,23,25,32,46,55,55,72,73,92,73,56,37,54,33,26,16] datax_symmetric", "mt.plot(sine,bfl(sine),color=\"green\") #this is the fitting symmetric summation plot mt.show() chi_squared = np.sum((np.polyval(fit, sine)", "for i in range(len(data)): errorbars.append(math.sqrt(data[i])) for k in range(8): symmetric.append(data[k]+data[16-k]) errorbars_symmetric.append(math.sqrt((data[k])+(data[16-k]))) symmetric.append(data[8]*2) errorbars_symmetric.append(math.sqrt((data[8])+(data[8])))", "math import numpy as np import statistics as st datax = [20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340] data", "symmetric summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this is the fitting symmetric summation plot mt.show()", "** 2) #Chi of line fit dNi = sum(symmetric) #Total of Symmetric Summation", "l in range(len(symmetric)): sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine = np.array(sin_symmetric) error = np.array(errorbars_symmetric) dNarr", "mt.scatter(sine,dNarr, color=\"red\") 
#this is the points for symmetric symmetric summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\")", "= sum(symmetric) #Total of Symmetric Summation stDni = st.stdev(symmetric) #Standard Deviation of Symmetric", "= np.sum((np.polyval(fit, sine) - dNarr) ** 2) #Chi of line fit dNi =", "sin_symmetric.append(np.sin(math.radians(datax_symmetric[l] / 2))) sine = np.array(sin_symmetric) error = np.array(errorbars_symmetric) dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr)", "for symmetric symmetric summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this is the fitting symmetric summation", "fitting symmetric summation plot mt.show() chi_squared = np.sum((np.polyval(fit, sine) - dNarr) ** 2)", "sine = np.array(sin_symmetric) error = np.array(errorbars_symmetric) dNarr = np.array(symmetric) slope,intercept,rvalue,pvalue,stderr=linregress(sine,dNarr) fit=np.polyfit(sine,dNarr,1) bfl=np.poly1d(fit) mt.errorbar(sine,dNarr,yerr=errorbars_symmetric,linestyle=\"None\",color=\"red\")", "is the points for symmetric symmetric summation mt.legend(\"Sinus function\") mt.plot(sine,bfl(sine),color=\"green\") #this is the" ]
[ "\"poodle\", \"collie\"] dog_breed_I_want = \"collie\" for dog_breed in dog_breeds_available_for_adoption: print(dog_breed) if dog_breed ==", "[\"french_bulldog\", \"dalmatian\", \"shihtzu\", \"poodle\", \"collie\"] dog_breed_I_want = \"collie\" for dog_breed in dog_breeds_available_for_adoption: print(dog_breed)", "= [\"french_bulldog\", \"dalmatian\", \"shihtzu\", \"poodle\", \"collie\"] dog_breed_I_want = \"collie\" for dog_breed in dog_breeds_available_for_adoption:", "dog_breed_I_want = \"collie\" for dog_breed in dog_breeds_available_for_adoption: print(dog_breed) if dog_breed == dog_breed_I_want: print(\"They", "dog_breeds_available_for_adoption: print(dog_breed) if dog_breed == dog_breed_I_want: print(\"They have the dog I want!\") break", "\"collie\" for dog_breed in dog_breeds_available_for_adoption: print(dog_breed) if dog_breed == dog_breed_I_want: print(\"They have the", "\"collie\"] dog_breed_I_want = \"collie\" for dog_breed in dog_breeds_available_for_adoption: print(dog_breed) if dog_breed == dog_breed_I_want:", "<gh_stars>0 dog_breeds_available_for_adoption = [\"french_bulldog\", \"dalmatian\", \"shihtzu\", \"poodle\", \"collie\"] dog_breed_I_want = \"collie\" for dog_breed", "= \"collie\" for dog_breed in dog_breeds_available_for_adoption: print(dog_breed) if dog_breed == dog_breed_I_want: print(\"They have", "\"dalmatian\", \"shihtzu\", \"poodle\", \"collie\"] dog_breed_I_want = \"collie\" for dog_breed in dog_breeds_available_for_adoption: print(dog_breed) if", "in dog_breeds_available_for_adoption: print(dog_breed) if dog_breed == dog_breed_I_want: print(\"They have the dog I want!\")", "dog_breed in dog_breeds_available_for_adoption: print(dog_breed) if dog_breed == dog_breed_I_want: print(\"They have the dog I", "dog_breeds_available_for_adoption = [\"french_bulldog\", \"dalmatian\", \"shihtzu\", \"poodle\", \"collie\"] dog_breed_I_want = \"collie\" for dog_breed in", "\"shihtzu\", \"poodle\", \"collie\"] dog_breed_I_want = \"collie\" for 
dog_breed in dog_breeds_available_for_adoption: print(dog_breed) if dog_breed", "for dog_breed in dog_breeds_available_for_adoption: print(dog_breed) if dog_breed == dog_breed_I_want: print(\"They have the dog" ]
[ "None, use only the bands in the list (possibly writing empty timestream maps", "types=None, bpm='BolometerProperties'): ''' Split the input map given by input into several output", "@core.indexmod class SplitByProperty(object): ''' Take an input G3FrameObject-derivative Map keyed by bolometer name", "return None return str(wafer).capitalize() @core.indexmod class SplitByPixelType(SplitByProperty): ''' Take an input G3FrameObject-derivative Map", "input into several output maps named output_root + band + GHz (e.g. CalTimestreams150GHz", "return band if math.isnan(band) or math.isinf(band): return None if band < 0: return", "given by the BolometerProperties key. Return the same type of maps as the", "to the frame). Otherwise, creates maps for every band that exists in the", "input into several output maps named output_root + wafer (e.g. CalTimestreamsW172 with the", "property='wafer_id') @staticmethod def converter(wafer): if wafer is None: return None return str(wafer).capitalize() @core.indexmod", "self).__init__( input=input, output_root=output_root, property_list=wafers, bpm=bpm, property='wafer_id') @staticmethod def converter(wafer): if wafer is None:", "splitting the input map. ''' if property is None: core.log_fatal(\"Property is a required", "get its band mapping from an alternative data source. ''' super(SplitByBand, self).__init__( input=input,", "output_root=None, types=None, bpm='BolometerProperties'): ''' Split the input map given by input into several", "pixel types of the detectors as given by the BolometerProperties key. Return the", "input map. ''' if property is None: core.log_fatal(\"Property is a required argument\") self.bpmattr", "with the default options). Arguments --------- input : str Key name of the", "or None if the argument is invalid. Overload this function in subclasses of", "into several based on the pixel types of the detectors as given by", "the property to its corresponding string name. 
Returns a string representation of the", "several based on the wafers of the detectors as given by the BolometerProperties", "self.bpmattr = property self.input = input self.output_root = output_root if output_root is not", "input : str Key name of the input map to split. property :", "input into several output maps named output_root + key (e.g. CalTimestreams + str(property))", "str) else x for x in property_list] else: self.props = None self.bpmkey =", "__init__(self, input='CalTimestreams', output_root=None, types=None, bpm='BolometerProperties'): ''' Split the input map given by input", "for prop in self.props: out[prop] = type(inmap)() for b in inmap.keys(): try: prop", "= type(inmap)() else: continue out[prop][b] = inmap[b] for prop in out.keys(): frame['%s%s' %", "pixel_type = str(pixel_type) if pixel_type.lower() == 'n/a': return None if pixel_type.islower(): return pixel_type.capitalize()", "keyed by bolometer name and split it into several based on the pixel", "this to get its band mapping from an alternative data source. ''' super(SplitByBand,", "= bpm self.bpm = None @staticmethod def converter(prop): \"\"\" Function for converting the", "G3TimestreamMap, G3MapInt, etc. ''' def __init__(self, input='CalTimestreams', property=None, property_list=None, output_root=None, bpm='BolometerProperties'): ''' Split", "converted to strings using the `SplitByProperty.converter` method. If property_list is not None, use", "it into several based on the bands of the detectors as given by", "Prefix for the output keys. If None (default), use `input` as the output", "to get its band mapping from an alternative data source. ''' super(SplitByBand, self).__init__(", "def __init__(self, input='CalTimestreams', output_root=None, bands=None, bpm='BolometerProperties'): ''' Split the input map given by", "handed, e.g. G3TimestreamMap, G3MapInt, etc. 
''' def __init__(self, input='CalTimestreams', output_root=None, wafers=None, bpm='BolometerProperties'): '''", "list (possibly writing empty timestream maps to the frame). Otherwise, creates maps for", "band < 0: return None return '%dGHz' % int(band/core.G3Units.GHz) @core.indexmod class SplitTimestreamsByBand(SplitByBand): def", "options). Arguments --------- input : str Key name of the input map to", "self.props is not None: for prop in self.props: out[prop] = type(inmap)() for b", "representation of the input argument, or None if the argument is invalid. Overload", "str Attribute name to extract from the BolometerProperties object. Required. property_list : list", "Properties to include in the output keys. Entries that are not strings will", "property_list is not None, use only the names in the list (possibly writing", "pixel_type is None: return None if not pixel_type: return None pixel_type = str(pixel_type)", "the bands in the list (possibly writing empty timestream maps to the frame).", "frame: self.bpm = frame[self.bpmkey] if self.input not in frame: return inmap = frame[self.input]", "as given by the BolometerProperties key. Return the same type of maps as", "maps for every band that exists in the input. Setting bpm to a", "was handed, e.g. G3TimestreamMap, G3MapInt, etc. ''' def __init__(self, input='CalTimestreams', output_root=None, wafers=None, bpm='BolometerProperties'):", "is not None, use only the wafers in the list (possibly writing empty", "BolometerProperties from spt3g import core import math __all__ = ['SplitByProperty', 'SplitByBand', 'SplitTimestreamsByBand', 'SplitByWafer',", "named output_root + band + GHz (e.g. CalTimestreams150GHz with the default options). 
If", "input=input, output_root=output_root, property_list=bands, bpm=bpm, property='band') @staticmethod def converter(band): if isinstance(band, str): return band", "Setting bpm to a non-default value causes this to get its wafer mapping", "return '%dGHz' % int(band/core.G3Units.GHz) @core.indexmod class SplitTimestreamsByBand(SplitByBand): def __init__(self, input='CalTimestreams', output_root=None, bands=None, bpm='BolometerProperties'):", "math.isnan(band) or math.isinf(band): return None if band < 0: return None return '%dGHz'", "else input if property_list is not None: self.props = [self.converter(x) if not isinstance(x,", "str): return band if math.isnan(band) or math.isinf(band): return None if band < 0:", "string name. Returns a string representation of the input argument, or None if", "@staticmethod def converter(wafer): if wafer is None: return None return str(wafer).capitalize() @core.indexmod class", "output keys. If None (default), use `input` as the output root. bpm :", "import BolometerProperties from spt3g import core import math __all__ = ['SplitByProperty', 'SplitByBand', 'SplitTimestreamsByBand',", "property of the detectors as given by the BolometerProperties key. Return the same", "''' def __init__(self, input='CalTimestreams', output_root=None, bands=None, bpm='BolometerProperties'): ''' Split the input map given", "def __init__(self, input='CalTimestreams', output_root=None, bands=None, bpm='BolometerProperties'): core.log_warn(\"SplitTimestreamsByBand is deprecated, use SplitByBand instead\") super(SplitTimestreamsByBand,", "to change how attributes are parsed into their string representations. \"\"\" if prop", "super(SplitByPixelType, self).__init__( input=input, output_root=output_root, property_list=types, bpm=bpm, property='pixel_type') @staticmethod def converter(pixel_type): if pixel_type is", "strings using the `SplitByProperty.converter` method. 
If property_list is not None, use only the", "the BolometerPropertiesMap from which to extract the requested `property` for splitting the input", "return None if not pixel_type: return None pixel_type = str(pixel_type) if pixel_type.lower() ==", "into several based on the property of the detectors as given by the", "are not strings will be converted to strings using the `SplitByProperty.converter` method. If", "else: self.props = None self.bpmkey = bpm self.bpm = None @staticmethod def converter(prop):", "= input self.output_root = output_root if output_root is not None else input if", "+ GHz (e.g. CalTimestreams150GHz with the default options). If bands is not None,", "to include in the output keys. Entries that are not strings will be", "deprecated, use SplitByBand instead\") super(SplitTimestreamsByBand, self).__init__( input=input, output_root=output_root, bands=bands, bpm=bpm) @core.indexmod class SplitByWafer(SplitByProperty):", ": list of properties Properties to include in the output keys. Entries that", "several output maps named output_root + band + GHz (e.g. CalTimestreams150GHz with the", "of the input argument, or None if the argument is invalid. Overload this", "detectors as given by the BolometerProperties key. Return the same type of maps", "by bolometer name and split it into several based on the pixel types", "writing empty timestream maps to the frame). Otherwise, creates maps for every band", "by input into several output maps named output_root + key (e.g. CalTimestreams +", "strings will be converted to strings using the `SplitByProperty.converter` method. 
If property_list is", "''' super(SplitByWafer, self).__init__( input=input, output_root=output_root, property_list=wafers, bpm=bpm, property='wafer_id') @staticmethod def converter(wafer): if wafer", "None: return None return str(wafer).capitalize() @core.indexmod class SplitByPixelType(SplitByProperty): ''' Take an input G3FrameObject-derivative", "bands is not None, use only the bands in the list (possibly writing", "use only the bands in the list (possibly writing empty timestream maps to", "is None: return None return str(wafer).capitalize() @core.indexmod class SplitByPixelType(SplitByProperty): ''' Take an input", "str(pixel_type) if pixel_type.lower() == 'n/a': return None if pixel_type.islower(): return pixel_type.capitalize() return pixel_type", "is a required argument\") self.bpmattr = property self.input = input self.output_root = output_root", "if prop not in out: if self.props is None and prop is not", "None: return None if not pixel_type: return None pixel_type = str(pixel_type) if pixel_type.lower()", "G3TimestreamMap, G3MapInt, etc. ''' def __init__(self, input='CalTimestreams', output_root=None, bands=None, bpm='BolometerProperties'): ''' Split the", "is deprecated, use SplitByBand instead\") super(SplitTimestreamsByBand, self).__init__( input=input, output_root=output_root, bands=bands, bpm=bpm) @core.indexmod class", "KeyError: continue if prop not in out: if self.props is None and prop", "it into several based on the property of the detectors as given by", "input map given by input into several output maps named output_root + band", "to get its wafer mapping from an alternative data source. ''' super(SplitByPixelType, self).__init__(", "invalid. Overload this function in subclasses of SplitByProperty to change how attributes are", "None: out[prop] = type(inmap)() else: continue out[prop][b] = inmap[b] for prop in out.keys():", "G3MapInt, etc. 
''' def __init__(self, input='CalTimestreams', output_root=None, wafers=None, bpm='BolometerProperties'): ''' Split the input", "creates maps for every wafer that exists in the input. Setting bpm to", "type(inmap)() else: continue out[prop][b] = inmap[b] for prop in out.keys(): frame['%s%s' % (self.output_root,", "prop is None: return None return str(prop) def __call__(self, frame): if self.bpmkey in", "requested `property` for splitting the input map. ''' if property is None: core.log_fatal(\"Property", "it was handed, e.g. G3TimestreamMap, G3MapInt, etc. ''' def __init__(self, input='CalTimestreams', property=None, property_list=None,", "except KeyError: continue if prop not in out: if self.props is None and", "bands of the detectors as given by the BolometerProperties key. Return the same", "it was handed, e.g. G3TimestreamMap, G3MapInt, etc. ''' def __init__(self, input='CalTimestreams', output_root=None, bands=None,", "of the BolometerPropertiesMap from which to extract the requested `property` for splitting the", "use `input` as the output root. bpm : str The key name of", "wafers in the list (possibly writing empty timestream maps to the frame). Otherwise,", "spt3g.calibration import BolometerProperties from spt3g import core import math __all__ = ['SplitByProperty', 'SplitByBand',", "bpm self.bpm = None @staticmethod def converter(prop): \"\"\" Function for converting the property", "= property self.input = input self.output_root = output_root if output_root is not None", "in frame: return inmap = frame[self.input] out = {} if self.props is not", "bands=bands, bpm=bpm) @core.indexmod class SplitByWafer(SplitByProperty): ''' Take an input G3FrameObject-derivative Map keyed by", "use only the names in the list (possibly writing empty timestream maps to", "bands in the list (possibly writing empty timestream maps to the frame). Otherwise,", "only the names in the list (possibly writing empty timestream maps to the", "options). 
If wafers is not None, use only the wafers in the list", "property='pixel_type') @staticmethod def converter(pixel_type): if pixel_type is None: return None if not pixel_type:", "'SplitByWafer', 'SplitByPixelType'] @core.indexmod class SplitByProperty(object): ''' Take an input G3FrameObject-derivative Map keyed by", "its corresponding string name. Returns a string representation of the input argument, or", "this function in subclasses of SplitByProperty to change how attributes are parsed into", "@core.indexmod class SplitByBand(SplitByProperty): ''' Take an input G3FrameObject-derivative Map keyed by bolometer name", "= [self.converter(x) if not isinstance(x, str) else x for x in property_list] else:", "for converting the property to its corresponding string name. Returns a string representation", "'SplitByBand', 'SplitTimestreamsByBand', 'SplitByWafer', 'SplitByPixelType'] @core.indexmod class SplitByProperty(object): ''' Take an input G3FrameObject-derivative Map", "def converter(pixel_type): if pixel_type is None: return None if not pixel_type: return None", "math __all__ = ['SplitByProperty', 'SplitByBand', 'SplitTimestreamsByBand', 'SplitByWafer', 'SplitByPixelType'] @core.indexmod class SplitByProperty(object): ''' Take", "% (self.output_root, prop)] = out[prop] @core.indexmod class SplitByBand(SplitByProperty): ''' Take an input G3FrameObject-derivative", "based on the property of the detectors as given by the BolometerProperties key.", "G3FrameObject-derivative Map keyed by bolometer name and split it into several based on", "use SplitByBand instead\") super(SplitTimestreamsByBand, self).__init__( input=input, output_root=output_root, bands=bands, bpm=bpm) @core.indexmod class SplitByWafer(SplitByProperty): '''", "writing empty timestream maps to the frame). Otherwise, creates maps for every that", "etc. 
''' def __init__(self, input='CalTimestreams', output_root=None, wafers=None, bpm='BolometerProperties'): ''' Split the input map", "for every wafer that exists in the input. Setting bpm to a non-default", "split it into several based on the wafers of the detectors as given", "self.bpmattr)) except KeyError: continue if prop not in out: if self.props is None", "\"\"\" if prop is None: return None return str(prop) def __call__(self, frame): if", "of maps as the one it was handed, e.g. G3TimestreamMap, G3MapInt, etc. '''", "= frame[self.bpmkey] if self.input not in frame: return inmap = frame[self.input] out =", "given by input into several output maps named output_root + key (e.g. CalTimestreams", "alternative data source. ''' super(SplitByWafer, self).__init__( input=input, output_root=output_root, property_list=wafers, bpm=bpm, property='wafer_id') @staticmethod def", "If wafers is not None, use only the wafers in the list (possibly", "instead\") super(SplitTimestreamsByBand, self).__init__( input=input, output_root=output_root, bands=bands, bpm=bpm) @core.indexmod class SplitByWafer(SplitByProperty): ''' Take an", ": str The key name of the BolometerPropertiesMap from which to extract the", "''' def __init__(self, input='CalTimestreams', output_root=None, types=None, bpm='BolometerProperties'): ''' Split the input map given", "is None: core.log_fatal(\"Property is a required argument\") self.bpmattr = property self.input = input", "core import math __all__ = ['SplitByProperty', 'SplitByBand', 'SplitTimestreamsByBand', 'SplitByWafer', 'SplitByPixelType'] @core.indexmod class SplitByProperty(object):", "the same type of maps as the one it was handed, e.g. 
G3TimestreamMap,", "several based on the bands of the detectors as given by the BolometerProperties", "<reponame>tskisner/spt3g_software<gh_stars>1-10 from spt3g.calibration import BolometerProperties from spt3g import core import math __all__ =", "input G3FrameObject-derivative Map keyed by bolometer name and split it into several based", "If property_list is not None, use only the names in the list (possibly", "return None return '%dGHz' % int(band/core.G3Units.GHz) @core.indexmod class SplitTimestreamsByBand(SplitByBand): def __init__(self, input='CalTimestreams', output_root=None,", "argument is invalid. Overload this function in subclasses of SplitByProperty to change how", "get its wafer mapping from an alternative data source. ''' super(SplitByWafer, self).__init__( input=input,", "if self.props is not None: for prop in self.props: out[prop] = type(inmap)() for", "maps to the frame). Otherwise, creates maps for every band that exists in", "% int(band/core.G3Units.GHz) @core.indexmod class SplitTimestreamsByBand(SplitByBand): def __init__(self, input='CalTimestreams', output_root=None, bands=None, bpm='BolometerProperties'): core.log_warn(\"SplitTimestreamsByBand is", "def __init__(self, input='CalTimestreams', output_root=None, wafers=None, bpm='BolometerProperties'): ''' Split the input map given by", "every that exists in the input. output_root : str Prefix for the output", "__call__(self, frame): if self.bpmkey in frame: self.bpm = frame[self.bpmkey] if self.input not in", "exists in the input. 
Setting bpm to a non-default value causes this to", "input='CalTimestreams', output_root=None, wafers=None, bpm='BolometerProperties'): ''' Split the input map given by input into", "output_root=output_root, property_list=types, bpm=bpm, property='pixel_type') @staticmethod def converter(pixel_type): if pixel_type is None: return None", "SplitByBand instead\") super(SplitTimestreamsByBand, self).__init__( input=input, output_root=output_root, bands=bands, bpm=bpm) @core.indexmod class SplitByWafer(SplitByProperty): ''' Take", "of the input map to split. property : str Attribute name to extract", "BolometerProperties key. Return the same type of maps as the one it was", "of properties Properties to include in the output keys. Entries that are not", "None, use only the names in the list (possibly writing empty timestream maps", "input=input, output_root=output_root, bands=bands, bpm=bpm) @core.indexmod class SplitByWafer(SplitByProperty): ''' Take an input G3FrameObject-derivative Map", "converting the property to its corresponding string name. Returns a string representation of", "in self.props: out[prop] = type(inmap)() for b in inmap.keys(): try: prop = self.converter(getattr(self.bpm[b],", "map given by input into several output maps named output_root + band +", "the default options). If wafers is not None, use only the wafers in", "to get its wafer mapping from an alternative data source. ''' super(SplitByWafer, self).__init__(", "(possibly writing empty timestream maps to the frame). Otherwise, creates maps for every", "b in inmap.keys(): try: prop = self.converter(getattr(self.bpm[b], self.bpmattr)) except KeyError: continue if prop", "required argument\") self.bpmattr = property self.input = input self.output_root = output_root if output_root", "empty timestream maps to the frame). Otherwise, creates maps for every that exists", "e.g. G3TimestreamMap, G3MapInt, etc. 
@core.indexmod
class SplitByProperty(object):
    '''
    Take an input G3FrameObject-derivative Map keyed by bolometer name and
    split it into several based on the property of the detectors as given by
    the BolometerProperties key.
    Return the same type of maps as the one it was handed, e.g. G3TimestreamMap,
    G3MapInt, etc.
    '''
    def __init__(self, input='CalTimestreams', property=None, property_list=None,
                 output_root=None, bpm='BolometerProperties'):
        '''
        Split the input map given by input into several output maps named
        output_root + key (e.g. CalTimestreams + str(property) with the
        default options).

        Arguments
        ---------
        input : str
            Key name of the input map to split.
        property : str
            Attribute name to extract from the BolometerProperties object.
            Required.
        property_list : list of properties
            Properties to include in the output keys.  Entries that are not
            strings will be converted to strings using the
            `SplitByProperty.converter` method.  If property_list is not None,
            use only the names in the list (possibly writing empty timestream
            maps to the frame).  Otherwise, creates maps for every property
            that exists in the input.
        output_root : str
            Prefix for the output keys.
            If None (default), use `input` as the output root.
        bpm : str
            The key name of the BolometerPropertiesMap from which to extract
            the requested `property` for splitting the input map.
        '''
        if property is None:
            core.log_fatal("Property is a required argument")
        self.bpmattr = property

        self.input = input
        self.output_root = output_root if output_root is not None else input
        if property_list is not None:
            # Normalize non-string entries to their string names up front so
            # membership tests in __call__ compare like with like.
            self.props = [self.converter(x) if not isinstance(x, str) else x
                          for x in property_list]
        else:
            self.props = None
        self.bpmkey = bpm
        self.bpm = None

    @staticmethod
    def converter(prop):
        """
        Function for converting the property to its corresponding string name.
        Returns a string representation of the input argument, or None if the
        argument is invalid.

        Overload this function in subclasses of SplitByProperty to change how
        attributes are parsed into their string representations.
        """
        if prop is None:
            return None
        return str(prop)

    def __call__(self, frame):
        '''
        Split frame[self.input] by detector property, storing each piece in
        the frame under the key output_root + property string.
        '''
        # Cache the most recently seen BolometerProperties map so it can be
        # applied to subsequent frames that lack one.
        if self.bpmkey in frame:
            self.bpm = frame[self.bpmkey]

        if self.input not in frame:
            return

        inmap = frame[self.input]
        out = {}
        if self.props is not None:
            # Pre-create one (possibly empty) output map per requested
            # property, preserving the input map's type.
            for prop in self.props:
                out[prop] = type(inmap)()

        for b in inmap.keys():
            try:
                prop = self.converter(getattr(self.bpm[b], self.bpmattr))
            except KeyError:
                # Bolometer missing from the properties map; skip it.
                continue
            if prop not in out:
                if self.props is None and prop is not None:
                    # No explicit property list: create output maps on demand.
                    out[prop] = type(inmap)()
                else:
                    # Property not requested (or invalid); drop this bolometer.
                    continue
            out[prop][b] = inmap[b]

        for prop in out.keys():
            frame['%s%s' % (self.output_root, prop)] = out[prop]
@core.indexmod
class SplitByBand(SplitByProperty):
    '''
    Take an input G3FrameObject-derivative Map keyed by bolometer name and
    split it into several based on the bands of the detectors as given by
    the BolometerProperties key.
    Return the same type of maps as the one it was handed, e.g. G3TimestreamMap,
    G3MapInt, etc.
    '''
    def __init__(self, input='CalTimestreams', output_root=None, bands=None,
                 bpm='BolometerProperties'):
        '''
        Split the input map given by input into several output maps named
        output_root + band + GHz (e.g. CalTimestreams150GHz with the default
        options). If bands is not None, use only the bands in the list
        (possibly writing empty timestream maps to the frame). Otherwise,
        creates maps for every band that exists in the input. Setting bpm to
        a non-default value causes this to get its band mapping from an
        alternative data source.
        '''
        super(SplitByBand, self).__init__(
            input=input, output_root=output_root, property_list=bands,
            bpm=bpm, property='band')

    @staticmethod
    def converter(band):
        '''
        Render a band value as a string of the form "<n>GHz".  Strings pass
        through untouched; NaN, infinite, and negative band values are
        invalid and map to None.
        '''
        if isinstance(band, str):
            return band
        if math.isnan(band) or math.isinf(band) or band < 0:
            return None
        ghz = int(band / core.G3Units.GHz)
        return '%dGHz' % ghz
@core.indexmod
class SplitTimestreamsByBand(SplitByBand):
    '''
    Deprecated name for SplitByBand: logs a deprecation warning on
    construction and otherwise behaves identically.
    '''
    def __init__(self, input='CalTimestreams', output_root=None, bands=None,
                 bpm='BolometerProperties'):
        # Warn at construction time, then delegate everything to SplitByBand.
        core.log_warn("SplitTimestreamsByBand is deprecated, use SplitByBand instead")
        super(SplitTimestreamsByBand, self).__init__(
            bands=bands, bpm=bpm, input=input, output_root=output_root)
@core.indexmod
class SplitByWafer(SplitByProperty):
    '''
    Take an input G3FrameObject-derivative Map keyed by bolometer name and
    split it into several based on the wafers of the detectors as given by
    the BolometerProperties key.
    Return the same type of maps as the one it was handed, e.g. G3TimestreamMap,
    G3MapInt, etc.
    '''
    def __init__(self, input='CalTimestreams', output_root=None, wafers=None,
                 bpm='BolometerProperties'):
        '''
        Split the input map given by input into several output maps named
        output_root + wafer (e.g. CalTimestreamsW172 with the default
        options). If wafers is not None, use only the wafers in the list
        (possibly writing empty timestream maps to the frame). Otherwise,
        creates maps for every wafer that exists in the input. Setting bpm to
        a non-default value causes this to get its wafer mapping from an
        alternative data source.
        '''
        super(SplitByWafer, self).__init__(
            property='wafer_id', property_list=wafers,
            input=input, output_root=output_root, bpm=bpm)

    @staticmethod
    def converter(wafer):
        '''
        Render a wafer identifier as a capitalized string; None stays None.
        '''
        return None if wafer is None else str(wafer).capitalize()
@core.indexmod
class SplitByPixelType(SplitByProperty):
    '''
    Take an input G3FrameObject-derivative Map keyed by bolometer name and
    split it into several based on the pixel types of the detectors as given by
    the BolometerProperties key.
    Return the same type of maps as the one it was handed, e.g. G3TimestreamMap,
    G3MapInt, etc.
    '''
    def __init__(self, input='CalTimestreams', output_root=None, types=None,
                 bpm='BolometerProperties'):
        '''
        Split the input map given by input into several output maps named
        output_root + pixel type (e.g. CalTimestreamsTES with the default
        options). If types is not None, use only the pixel types in the list
        (possibly writing empty timestream maps to the frame). Otherwise,
        creates maps for every pixel type that exists in the input. Setting
        bpm to a non-default value causes this to get its pixel type mapping
        from an alternative data source.
        '''
        super(SplitByPixelType, self).__init__(
            input=input, output_root=output_root, property_list=types,
            bpm=bpm, property='pixel_type')

    @staticmethod
    def converter(pixel_type):
        '''
        Render a pixel type as a normalized string.  Returns None for
        missing/empty values and for the placeholder 'n/a'; all-lowercase
        names are capitalized, anything else passes through unchanged.
        '''
        if pixel_type is None:
            return None
        if not pixel_type:
            # Empty string (or other falsy value) carries no type information.
            return None
        pixel_type = str(pixel_type)
        if pixel_type.lower() == 'n/a':
            # Placeholder used for detectors with no assigned pixel type.
            return None
        if pixel_type.islower():
            # Normalize all-lowercase names; mixed-case names are kept as-is.
            return pixel_type.capitalize()
        return pixel_type
''' super(SplitByPixelType, self).__init__( input=input, output_root=output_root, property_list=types,", "in out: if self.props is None and prop is not None: out[prop] =", "property_list=wafers, bpm=bpm, property='wafer_id') @staticmethod def converter(wafer): if wafer is None: return None return", "a required argument\") self.bpmattr = property self.input = input self.output_root = output_root if", "the pixel types of the detectors as given by the BolometerProperties key. Return", "not isinstance(x, str) else x for x in property_list] else: self.props = None", "etc. ''' def __init__(self, input='CalTimestreams', property=None, property_list=None, output_root=None, bpm='BolometerProperties'): ''' Split the input", "argument\") self.bpmattr = property self.input = input self.output_root = output_root if output_root is", "__all__ = ['SplitByProperty', 'SplitByBand', 'SplitTimestreamsByBand', 'SplitByWafer', 'SplitByPixelType'] @core.indexmod class SplitByProperty(object): ''' Take an", "subclasses of SplitByProperty to change how attributes are parsed into their string representations.", "based on the wafers of the detectors as given by the BolometerProperties key.", "the property of the detectors as given by the BolometerProperties key. Return the", "If bands is not None, use only the bands in the list (possibly", "causes this to get its band mapping from an alternative data source. '''", "that exists in the input. 
output_root : str Prefix for the output keys.", "self.output_root = output_root if output_root is not None else input if property_list is", "output_root is not None else input if property_list is not None: self.props =", "return str(prop) def __call__(self, frame): if self.bpmkey in frame: self.bpm = frame[self.bpmkey] if", "str The key name of the BolometerPropertiesMap from which to extract the requested", "= inmap[b] for prop in out.keys(): frame['%s%s' % (self.output_root, prop)] = out[prop] @core.indexmod", "input=input, output_root=output_root, property_list=wafers, bpm=bpm, property='wafer_id') @staticmethod def converter(wafer): if wafer is None: return", "(e.g. CalTimestreams150GHz with the default options). If bands is not None, use only", "None: self.props = [self.converter(x) if not isinstance(x, str) else x for x in", "< 0: return None return '%dGHz' % int(band/core.G3Units.GHz) @core.indexmod class SplitTimestreamsByBand(SplitByBand): def __init__(self,", "input if property_list is not None: self.props = [self.converter(x) if not isinstance(x, str)", "super(SplitByWafer, self).__init__( input=input, output_root=output_root, property_list=wafers, bpm=bpm, property='wafer_id') @staticmethod def converter(wafer): if wafer is", "frame): if self.bpmkey in frame: self.bpm = frame[self.bpmkey] if self.input not in frame:", "the frame). Otherwise, creates maps for every that exists in the input. output_root", "not in frame: return inmap = frame[self.input] out = {} if self.props is", "output maps named output_root + wafer (e.g. CalTimestreamsW172 with the default options). If", "an alternative data source. ''' super(SplitByBand, self).__init__( input=input, output_root=output_root, property_list=bands, bpm=bpm, property='band') @staticmethod", "input='CalTimestreams', property=None, property_list=None, output_root=None, bpm='BolometerProperties'): ''' Split the input map given by input", "method. 
If property_list is not None, use only the names in the list", "the input. output_root : str Prefix for the output keys. If None (default),", "Split the input map given by input into several output maps named output_root", "None if the argument is invalid. Overload this function in subclasses of SplitByProperty", "be converted to strings using the `SplitByProperty.converter` method. If property_list is not None,", "converter(wafer): if wafer is None: return None return str(wafer).capitalize() @core.indexmod class SplitByPixelType(SplitByProperty): '''", "was handed, e.g. G3TimestreamMap, G3MapInt, etc. ''' def __init__(self, input='CalTimestreams', output_root=None, types=None, bpm='BolometerProperties'):", "options). If bands is not None, use only the bands in the list", "pixel_type: return None pixel_type = str(pixel_type) if pixel_type.lower() == 'n/a': return None if", "Setting bpm to a non-default value causes this to get its band mapping", "string representation of the input argument, or None if the argument is invalid.", "None return str(wafer).capitalize() @core.indexmod class SplitByPixelType(SplitByProperty): ''' Take an input G3FrameObject-derivative Map keyed", "None if band < 0: return None return '%dGHz' % int(band/core.G3Units.GHz) @core.indexmod class", "data source. ''' super(SplitByBand, self).__init__( input=input, output_root=output_root, property_list=bands, bpm=bpm, property='band') @staticmethod def converter(band):", "in the input. output_root : str Prefix for the output keys. If None", "inmap.keys(): try: prop = self.converter(getattr(self.bpm[b], self.bpmattr)) except KeyError: continue if prop not in", "name to extract from the BolometerProperties object. Required. property_list : list of properties", "is None and prop is not None: out[prop] = type(inmap)() else: continue out[prop][b]", "If None (default), use `input` as the output root. bpm : str The", "\"\"\" Function for converting the property to its corresponding string name. 
Returns a", "given by input into several output maps named output_root + wafer (e.g. CalTimestreamsW172", "property_list is not None: self.props = [self.converter(x) if not isinstance(x, str) else x", "property=None, property_list=None, output_root=None, bpm='BolometerProperties'): ''' Split the input map given by input into", "maps to the frame). Otherwise, creates maps for every that exists in the", "its band mapping from an alternative data source. ''' super(SplitByBand, self).__init__( input=input, output_root=output_root,", "in the input. Setting bpm to a non-default value causes this to get", "source. ''' super(SplitByBand, self).__init__( input=input, output_root=output_root, property_list=bands, bpm=bpm, property='band') @staticmethod def converter(band): if", "a non-default value causes this to get its wafer mapping from an alternative", "output maps named output_root + band + GHz (e.g. CalTimestreams150GHz with the default", "{} if self.props is not None: for prop in self.props: out[prop] = type(inmap)()", "None self.bpmkey = bpm self.bpm = None @staticmethod def converter(prop): \"\"\" Function for", "same type of maps as the one it was handed, e.g. G3TimestreamMap, G3MapInt,", "into several based on the wafers of the detectors as given by the", "if self.props is None and prop is not None: out[prop] = type(inmap)() else:", "is not None: out[prop] = type(inmap)() else: continue out[prop][b] = inmap[b] for prop", "into several output maps named output_root + wafer (e.g. CalTimestreamsW172 with the default", "class SplitTimestreamsByBand(SplitByBand): def __init__(self, input='CalTimestreams', output_root=None, bands=None, bpm='BolometerProperties'): core.log_warn(\"SplitTimestreamsByBand is deprecated, use SplitByBand", "if property_list is not None: self.props = [self.converter(x) if not isinstance(x, str) else", "= self.converter(getattr(self.bpm[b], self.bpmattr)) except KeyError: continue if prop not in out: if self.props", "representations. 
\"\"\" if prop is None: return None return str(prop) def __call__(self, frame):", "its wafer mapping from an alternative data source. ''' super(SplitByPixelType, self).__init__( input=input, output_root=output_root,", "int(band/core.G3Units.GHz) @core.indexmod class SplitTimestreamsByBand(SplitByBand): def __init__(self, input='CalTimestreams', output_root=None, bands=None, bpm='BolometerProperties'): core.log_warn(\"SplitTimestreamsByBand is deprecated,", "import math __all__ = ['SplitByProperty', 'SplitByBand', 'SplitTimestreamsByBand', 'SplitByWafer', 'SplitByPixelType'] @core.indexmod class SplitByProperty(object): '''", "out = {} if self.props is not None: for prop in self.props: out[prop]", "bands=None, bpm='BolometerProperties'): core.log_warn(\"SplitTimestreamsByBand is deprecated, use SplitByBand instead\") super(SplitTimestreamsByBand, self).__init__( input=input, output_root=output_root, bands=bands,", "one it was handed, e.g. G3TimestreamMap, G3MapInt, etc. ''' def __init__(self, input='CalTimestreams', property=None,", "maps to the frame). Otherwise, creates maps for every wafer that exists in", "keyed by bolometer name and split it into several based on the bands", "maps as the one it was handed, e.g. G3TimestreamMap, G3MapInt, etc. ''' def", "@staticmethod def converter(prop): \"\"\" Function for converting the property to its corresponding string", "wafers is not None, use only the wafers in the list (possibly writing", "+ wafer (e.g. CalTimestreamsW172 with the default options). If wafers is not None,", "Otherwise, creates maps for every that exists in the input. output_root : str", "a string representation of the input argument, or None if the argument is", "default options). Arguments --------- input : str Key name of the input map", "= frame[self.input] out = {} if self.props is not None: for prop in", "default options). 
If bands is not None, use only the bands in the", "not None, use only the bands in the list (possibly writing empty timestream", "class SplitByWafer(SplitByProperty): ''' Take an input G3FrameObject-derivative Map keyed by bolometer name and", "source. ''' super(SplitByWafer, self).__init__( input=input, output_root=output_root, property_list=wafers, bpm=bpm, property='wafer_id') @staticmethod def converter(wafer): if", "return inmap = frame[self.input] out = {} if self.props is not None: for", "Overload this function in subclasses of SplitByProperty to change how attributes are parsed", "CalTimestreams150GHz with the default options). If bands is not None, use only the", "an input G3FrameObject-derivative Map keyed by bolometer name and split it into several", "''' Split the input map given by input into several output maps named", "bpm=bpm) @core.indexmod class SplitByWafer(SplitByProperty): ''' Take an input G3FrameObject-derivative Map keyed by bolometer", "creates maps for every band that exists in the input. Setting bpm to", "from an alternative data source. ''' super(SplitByBand, self).__init__( input=input, output_root=output_root, property_list=bands, bpm=bpm, property='band')", "(default), use `input` as the output root. bpm : str The key name", "the input map. ''' if property is None: core.log_fatal(\"Property is a required argument\")", "map given by input into several output maps named output_root + wafer (e.g.", "try: prop = self.converter(getattr(self.bpm[b], self.bpmattr)) except KeyError: continue if prop not in out:", "to split. 
property : str Attribute name to extract from the BolometerProperties object.", "input self.output_root = output_root if output_root is not None else input if property_list", "by bolometer name and split it into several based on the wafers of", "out.keys(): frame['%s%s' % (self.output_root, prop)] = out[prop] @core.indexmod class SplitByBand(SplitByProperty): ''' Take an", "by bolometer name and split it into several based on the property of", "self.converter(getattr(self.bpm[b], self.bpmattr)) except KeyError: continue if prop not in out: if self.props is", "as the one it was handed, e.g. G3TimestreamMap, G3MapInt, etc. ''' def __init__(self,", "into several output maps named output_root + key (e.g. CalTimestreams + str(property)) with", "their string representations. \"\"\" if prop is None: return None return str(prop) def", "0: return None return '%dGHz' % int(band/core.G3Units.GHz) @core.indexmod class SplitTimestreamsByBand(SplitByBand): def __init__(self, input='CalTimestreams',", "input map to split. property : str Attribute name to extract from the", "bolometer name and split it into several based on the property of the", "None if not pixel_type: return None pixel_type = str(pixel_type) if pixel_type.lower() == 'n/a':", "BolometerPropertiesMap from which to extract the requested `property` for splitting the input map.", "None @staticmethod def converter(prop): \"\"\" Function for converting the property to its corresponding", "frame: return inmap = frame[self.input] out = {} if self.props is not None:", "empty timestream maps to the frame). Otherwise, creates maps for every wafer that", "isinstance(band, str): return band if math.isnan(band) or math.isinf(band): return None if band <", "exists in the input. output_root : str Prefix for the output keys. If", "class SplitByBand(SplitByProperty): ''' Take an input G3FrameObject-derivative Map keyed by bolometer name and", "from an alternative data source. 
''' super(SplitByWafer, self).__init__( input=input, output_root=output_root, property_list=wafers, bpm=bpm, property='wafer_id')", "return None return str(prop) def __call__(self, frame): if self.bpmkey in frame: self.bpm =", "extract from the BolometerProperties object. Required. property_list : list of properties Properties to", "if math.isnan(band) or math.isinf(band): return None if band < 0: return None return", "type(inmap)() for b in inmap.keys(): try: prop = self.converter(getattr(self.bpm[b], self.bpmattr)) except KeyError: continue", "of the detectors as given by the BolometerProperties key. Return the same type", "SplitByPixelType(SplitByProperty): ''' Take an input G3FrameObject-derivative Map keyed by bolometer name and split", "def __init__(self, input='CalTimestreams', property=None, property_list=None, output_root=None, bpm='BolometerProperties'): ''' Split the input map given", "source. ''' super(SplitByPixelType, self).__init__( input=input, output_root=output_root, property_list=types, bpm=bpm, property='pixel_type') @staticmethod def converter(pixel_type): if", "on the property of the detectors as given by the BolometerProperties key. Return", "The key name of the BolometerPropertiesMap from which to extract the requested `property`", "output_root=output_root, property_list=wafers, bpm=bpm, property='wafer_id') @staticmethod def converter(wafer): if wafer is None: return None", "etc. ''' def __init__(self, input='CalTimestreams', output_root=None, bands=None, bpm='BolometerProperties'): ''' Split the input map", "is not None: self.props = [self.converter(x) if not isinstance(x, str) else x for", "the output keys. Entries that are not strings will be converted to strings", "the output keys. If None (default), use `input` as the output root. bpm", "str(prop) def __call__(self, frame): if self.bpmkey in frame: self.bpm = frame[self.bpmkey] if self.input" ]
[ "operations = [ migrations.CreateModel( name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)),", "models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ], options={ 'ordering': ['-datahora'], },", "('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100,", "}, ), migrations.CreateModel( name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em',", "('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'),", "'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ], options={ 'ordering':", "('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)),", "('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, 
to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ], options={ 'ordering': ['-datahora'],", "Django 3.2 on 2021-04-27 02:00 import django.core.validators from django.db import migrations, models import", "), migrations.CreateModel( name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)),", "'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')),", "serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()),", "3.2 on 2021-04-27 02:00 import django.core.validators from django.db import migrations, models import django.db.models.deletion", "import django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial =", "name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)),", "options={ 'ordering': ['slug'], }, ), migrations.CreateModel( name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "), migrations.AddField( model_name='despesa', name='lancamento', 
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='despesa', to='lancamentos.lancamento'), ), migrations.AddIndex( model_name='categoria', index=models.Index(fields=['slug'], name='lancamentos_slug_1e2e80_idx'), ),", "'Estornado')], default=1)), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True,", "['-datahora'], }, ), migrations.CreateModel( name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)),", "['-datahora'], }, ), migrations.CreateModel( name='Receita', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)),", "default=0, max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ], options={ 'ordering': ['-datahora'], },", "('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ], options={", "models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()), ], options={ 'ordering':", "'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel(", "models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)),", "Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ('despesa', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ],", "primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()), ('valor', models.DecimalField(decimal_places=2,", "default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ], options={ 'ordering': ['data'], }, ), migrations.AddField( model_name='despesa',", "= [ ('carteiras', '0001_initial'), ] operations = [ migrations.CreateModel( name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True,", "name='Receita', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)),", "related_name='receita', to='lancamentos.lancamento')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True,", "default=1)), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True,", "}, ), migrations.CreateModel( name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em',", "related_name='parcelas', to='lancamentos.despesa')), ], options={ 'ordering': ['data'], }, ), migrations.AddField( model_name='despesa', name='lancamento', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='despesa',", 
"('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1,", "Migration(migrations.Migration): initial = True dependencies = [ ('carteiras', '0001_initial'), ] operations = [", "= True dependencies = [ ('carteiras', '0001_initial'), ] operations = [ migrations.CreateModel( name='Categoria',", "('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()),", "options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "by Django 3.2 on 2021-04-27 02:00 import django.core.validators from django.db import migrations, models", "('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'),", "(3, 'Cancelado'), (4, 'Estornado')], default=1)), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Lancamento',", "'ordering': ['data'], }, ), migrations.AddField( model_name='despesa', name='lancamento', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='despesa', 
to='lancamentos.lancamento'), ), migrations.AddIndex( model_name='categoria',", "'Estornado')], default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ], options={ 'ordering': ['data'], }, ), migrations.AddField(", "models.TextField()), ], options={ 'ordering': ['slug'], }, ), migrations.CreateModel( name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True,", "02:00 import django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial", "[ ('carteiras', '0001_initial'), ] operations = [ migrations.CreateModel( name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True,", "fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data',", "[ migrations.CreateModel( name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)),", "migrations.CreateModel( name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome',", "dependencies = [ ('carteiras', '0001_initial'), ] operations = [ migrations.CreateModel( name='Categoria', fields=[ ('id',", "], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False,", "on 2021-04-27 
02:00 import django.core.validators from django.db import migrations, models import django.db.models.deletion class", "primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora',", "django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ('carteiras', '0001_initial'), ] operations", "models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas',", "('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()), ], options={ 'ordering': ['slug'], }, ),", "models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, default=0, max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ], options={", "migrations.CreateModel( name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo',", "['-datahora'], }, ), migrations.CreateModel( name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)),", "(4, 'Estornado')], default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ], options={ 'ordering': ['data'], }, ),", "'ordering': 
['-datahora'], }, ), migrations.CreateModel( name='Receita', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em',", "('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)),", "['slug'], }, ), migrations.CreateModel( name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)),", "verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()), ('quantidade_parcelas',", "models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, default=0, max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE,", "'Cancelado'), (4, 'Estornado')], default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ], options={ 'ordering': ['data'], },", "models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')],", "models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ], options={ 'ordering': ['data'], }, ), migrations.AddField( model_name='despesa', name='lancamento', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE,", "to='lancamentos.despesa')), ], 
options={ 'ordering': ['data'], }, ), migrations.AddField( model_name='despesa', name='lancamento', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='despesa', to='lancamentos.lancamento'),", "models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])), ('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')),", "primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, default=0, max_digits=11)),", "django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True", "'Receita'), (2, 'Despesa')])), ('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ],", "models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()), ], options={ 'ordering': ['slug'], },", "('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')],", "}, ), migrations.CreateModel( name='Receita', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em',", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True 
dependencies = [ ('carteiras',", "name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1,", "('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'),", "models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em", "models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)), ('situacao',", "class Migration(migrations.Migration): initial = True dependencies = [ ('carteiras', '0001_initial'), ] operations =", "to='carteiras.centrocusto')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Receita', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True,", "('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'),", "), migrations.CreateModel( name='Receita', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', 
models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)),", "True dependencies = [ ('carteiras', '0001_initial'), ] operations = [ migrations.CreateModel( name='Categoria', fields=[", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies", "('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Receita', fields=[", "('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'), (2,", "options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "}, ), migrations.AddField( model_name='despesa', name='lancamento', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='despesa', to='lancamentos.lancamento'), ), migrations.AddIndex( model_name='categoria', index=models.Index(fields=['slug'], name='lancamentos_slug_1e2e80_idx'),", "}, ), migrations.CreateModel( name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em',", "serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()),", "max_digits=11)), ('datahora', models.DateTimeField()), 
('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'),", "fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug',", "Generated by Django 3.2 on 2021-04-27 02:00 import django.core.validators from django.db import migrations,", "(4, 'Estornado')], default=1)), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Lancamento', fields=[ ('id',", "('datahora', models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4,", "models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Receita', fields=[ ('id',", "('tipo', models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])), ('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos',", "'0001_initial'), ] operations = [ migrations.CreateModel( name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()), ],", "related_name='lancamentos', to='carteiras.centrocusto')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( 
name='Receita', fields=[ ('id', models.BigAutoField(auto_created=True,", "models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()), ], options={ 'ordering': ['slug'], }, ), migrations.CreateModel(", "models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])), ('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')),", "migrations.CreateModel( name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem',", "('data', models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'),", "(2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ], options={ 'ordering': ['-datahora'], }, ),", "serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)),", "'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em',", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [", "models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', 
models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2,", "('slug', models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()), ], options={ 'ordering': ['slug'], }, ), migrations.CreateModel( name='Despesa',", "'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em',", "('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, default=0, max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita',", "models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()), ('valor',", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies =", "<reponame>douglaspands/controle-financeiro # Generated by Django 3.2 on 2021-04-27 02:00 import django.core.validators from django.db", "models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, default=0,", "('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ], options={ 'ordering': ['-datahora'], }, ), 
migrations.CreateModel( name='Parcela', fields=[", "models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])),", "(2, 'Despesa')])), ('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ], options={", "initial = True dependencies = [ ('carteiras', '0001_initial'), ] operations = [ migrations.CreateModel(", "['data'], }, ), migrations.AddField( model_name='despesa', name='lancamento', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='despesa', to='lancamentos.lancamento'), ), migrations.AddIndex( model_name='categoria', index=models.Index(fields=['slug'],", "('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel(", "= [ migrations.CreateModel( name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em',", "('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])), ('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo',", "verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', 
models.DecimalField(decimal_places=2, default=0, max_digits=11)), ('datahora', models.DateTimeField()),", "models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em", "'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ], options={ 'ordering': ['-datahora'],", "migrations.CreateModel( name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo',", "fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total',", "('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1,", "models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ], options={ 'ordering':", "'Despesa')])), ('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ], options={ 'ordering':", "models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel(", "Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ], options={ 'ordering': ['-datahora'], },", "models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2,", "options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Receita', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, default=0, max_digits=11)), ('datahora',", "models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4,", "primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)), ('descricao',", "verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])), ('datahora', models.DateTimeField()), ('categorias',", "'ordering': ['slug'], }, ), migrations.CreateModel( name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em',", 
"], options={ 'ordering': ['slug'], }, ), migrations.CreateModel( name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False,", "max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ], options={ 'ordering': ['-datahora'], }, ),", "models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3,", "('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2,", "(2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ], options={", "import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ('carteiras', '0001_initial'), ]", "# Generated by Django 3.2 on 2021-04-27 02:00 import django.core.validators from django.db import", "('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, default=0, max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento',", "('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ], options={ 'ordering': ['-datahora'], }, ),", "), 
migrations.CreateModel( name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)),", "verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])), ('data', models.DateField()), ('valor', models.DecimalField(decimal_places=2, max_digits=11)), ('situacao',", "], options={ 'ordering': ['data'], }, ), migrations.AddField( model_name='despesa', name='lancamento', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='despesa', to='lancamentos.lancamento'), ),", "2021-04-27 02:00 import django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "), migrations.CreateModel( name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)),", "('carteiras', '0001_initial'), ] operations = [ migrations.CreateModel( name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False,", "('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()), ], options={", "models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ],", "], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Receita', fields=[ ('id', 
models.BigAutoField(auto_created=True, primary_key=True, serialize=False,", "unique=True)), ('descricao', models.TextField()), ], options={ 'ordering': ['slug'], }, ), migrations.CreateModel( name='Despesa', fields=[ ('id',", "('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])), ('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True,", "models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Parcela', fields=[ ('id',", "models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])), ('datahora', models.DateTimeField()), ('categorias', models.ManyToManyField(blank=True, to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "migrations.AddField( model_name='despesa', name='lancamento', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='despesa', to='lancamentos.lancamento'), ), migrations.AddIndex( model_name='categoria', index=models.Index(fields=['slug'], name='lancamentos_slug_1e2e80_idx'), ), ]", "primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])), ('datahora',", "models.DecimalField(decimal_places=2, default=0, max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ], options={ 'ordering': ['-datahora'],", "migrations.CreateModel( name='Receita', fields=[ ('id', models.BigAutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome',", "models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ('carteiras', '0001_initial'),", "models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()), ], options={ 'ordering': ['slug'], }, ), migrations.CreateModel( name='Despesa', fields=[", "fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'),", "('valor_total', models.DecimalField(decimal_places=2, default=0, max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ], options={ 'ordering':", "name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)),", "max_digits=11)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3, 'Cancelado'), (4, 'Estornado')], default=1)), ('despesa',", "('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)), ('descricao', models.TextField()), ], options={ 'ordering': ['slug'],", "to='lancamentos.Categoria')), ('centro_custo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lancamentos', to='carteiras.centrocusto')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Receita',", 
"models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Parcela',", "to='lancamentos.lancamento')), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True,", "('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ], options={ 'ordering': ['data'], }, ), migrations.AddField( model_name='despesa', name='lancamento',", "models.DecimalField(decimal_places=2, max_digits=11)), ('datahora', models.DateTimeField()), ('quantidade_parcelas', models.IntegerField(default=1)), ('situacao', models.IntegerField(choices=[(1, 'Em Aberto'), (2, 'Pago'), (3,", "name='Parcela', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('ordem', models.IntegerField(validators=[django.core.validators.MinValueValidator(1)])),", "models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, max_digits=11)),", "] operations = [ migrations.CreateModel( name='Categoria', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em',", "'Cancelado'), (4, 'Estornado')], default=1)), ], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Lancamento', fields=[", "('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, 
default=0, max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')), ],", "serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('tipo', models.IntegerField(choices=[(1, 'Receita'), (2, 'Despesa')])), ('datahora', models.DateTimeField()),", "models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('criado_em', models.DateTimeField(auto_now_add=True)), ('atualizado_em', models.DateTimeField(auto_now=True)), ('titulo', models.CharField(max_length=100)), ('slug', models.SlugField(max_length=100, unique=True)),", "models.DateTimeField(auto_now=True)), ('nome', models.CharField(max_length=100)), ('valor_total', models.DecimalField(decimal_places=2, default=0, max_digits=11)), ('datahora', models.DateTimeField()), ('lancamento', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='receita', to='lancamentos.lancamento')),", "], options={ 'ordering': ['-datahora'], }, ), migrations.CreateModel( name='Lancamento', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False,", "('descricao', models.TextField()), ], options={ 'ordering': ['slug'], }, ), migrations.CreateModel( name='Despesa', fields=[ ('id', models.BigAutoField(auto_created=True,", "options={ 'ordering': ['data'], }, ), migrations.AddField( model_name='despesa', name='lancamento', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='despesa', to='lancamentos.lancamento'), ), migrations.AddIndex(", "(3, 'Cancelado'), (4, 'Estornado')], default=1)), ('despesa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parcelas', to='lancamentos.despesa')), ], options={ 'ordering': ['data']," ]
[ "raw={} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self): issue = sn( fields=sn( timetracking=sn(", "fields=sn( timetracking=sn( raw={} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self): issue = sn(", "as sn class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue = sn( fields=sn( timetracking=sn( raw={} )", "from types import SimpleNamespace as sn class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue = sn(", "timetracking=sn( raw={} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self): issue = sn( fields=sn(", "def test_get_remaining_estimate(self): issue = sn( fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue),", "JiraClient import unittest from types import SimpleNamespace as sn class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self):", "from jiraclient import JiraClient import unittest from types import SimpleNamespace as sn class", "fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '1h') if __name__ == '__main__':", "= sn( fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '1h') if __name__", "SimpleNamespace as sn class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue = sn( fields=sn( timetracking=sn( raw={}", "timetracking=sn( raw={'remainingEstimate': '1h'} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '1h') if __name__ == '__main__': unittest.main()", "def test_get_remaining_estimate_empty(self): issue = sn( fields=sn( timetracking=sn( raw={} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m')", "<filename>test_time_log_window.py 
from jiraclient import JiraClient import unittest from types import SimpleNamespace as sn", "= sn( fields=sn( timetracking=sn( raw={} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self): issue", "unittest from types import SimpleNamespace as sn class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue =", "issue = sn( fields=sn( timetracking=sn( raw={} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self):", "sn( fields=sn( timetracking=sn( raw={} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self): issue =", "test_get_remaining_estimate(self): issue = sn( fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '1h')", "class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue = sn( fields=sn( timetracking=sn( raw={} ) ) )", ") self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self): issue = sn( fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'} )", "import SimpleNamespace as sn class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue = sn( fields=sn( timetracking=sn(", "types import SimpleNamespace as sn class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue = sn( fields=sn(", "self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self): issue = sn( fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'} ) )", "Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue = sn( fields=sn( timetracking=sn( raw={} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue),", ") ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self): issue = sn( fields=sn( 
timetracking=sn( raw={'remainingEstimate':", ") ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def test_get_remaining_estimate(self): issue = sn( fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'}", "import unittest from types import SimpleNamespace as sn class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue", "sn class Test(unittest.TestCase): def test_get_remaining_estimate_empty(self): issue = sn( fields=sn( timetracking=sn( raw={} ) )", "issue = sn( fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '1h') if", "test_get_remaining_estimate_empty(self): issue = sn( fields=sn( timetracking=sn( raw={} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '0m') def", "sn( fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'} ) ) ) self.assertEqual(JiraClient.get_remaining_estimate(issue), '1h') if __name__ ==", "import JiraClient import unittest from types import SimpleNamespace as sn class Test(unittest.TestCase): def", "jiraclient import JiraClient import unittest from types import SimpleNamespace as sn class Test(unittest.TestCase):", "'0m') def test_get_remaining_estimate(self): issue = sn( fields=sn( timetracking=sn( raw={'remainingEstimate': '1h'} ) ) )" ]
[ "Process to perform an alarm bits lookup to decode the actual alarm state", "REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # except: #", "latest unique - BASICALLY THIS MEANS NEED TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json # #FILE", "str(invalrmcount)) # #determine inv count - if less than 100, nothing more to", "+ str(invalrmcount)) # #set transactid and count to first one above # #nextinvalrmcount", "check if latest unique json has no records, if so delete it #", "parse the unique json file to get the new transaction id and count", "FOR INV RECORDS # #ELSE IF COUNT >= 100 --> NEED TO ITERATE", "--------------------------------------------------------- # # invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime = '' # for item", "to first one above # #nextinvalrmcount = invalrmcount # nextinvalrmcount = thecount #", "# for item in tanklist: # print(item) #need to fix # #Org example", "= d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list # for k in list: # try: # if", "# #print(k) # for k, v in k.items(): # if k == 'iOrganizationID':", "transactidstr # nextinvalrmcount = invalrmcount # # while more to get, set new", "of ' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals') # #step5 - works now,", "latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank # alarmstatus", "True: # print(str(datetime.datetime.now()) + ' - wake up...') # #step1 - request all", "= p.get_tank_list() #gives list of tank ids # print(tanklist) # for item in", "and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) # #step2 -", "# print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has gross vol of '", 
"#step3 - get latest inv and save file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 -", "for item in tanklist: #for each unique tank, create a unique file for", "# # Parse response # dresp = g.parse_response(soapResponse) # print(dresp) # INV ALARM", "latest inventory for each tank in list # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the", "INVENTORY ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS! # #TODO: Place", "do # nexttolastidstr = '' # newuniquedictresponse = [] # if invalrmcount ==", "+ p.get_tankname_bytankid_file(str(item)) + ' currently has gross vol of ' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr))))", "step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 - parse and display the data #", "# print('Inventory count: ' + str(thecount)) # #IF COUNT <= 0 --> NO", "list in Tank value # print('Return code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List:", "- THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # NOTE: THIS METHOD OF", "iterate to latest') # #transactidstr = p.get_inventorycalcalrm_transactID() # #invalrmcount = p.count_inventorycalcalrm() # print('TransactionID:", "NEW Inv Count: ' + str(newinvalrmcount)) # #Step 5- Repeat as neccessary until", "get and save unique json reponse for the next transactid - IMPORTANT: THIS", "starting point GetInventoryCalcAlarmResponselatest json file! 
# if len(str(newuniquedictresponse)) > 0: # g.save_resp_unique_json(newuniquedictresponse, 'latest')", "d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list # for k in list: # try: # if k['iTankID']:", "= g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) # #get the new inv alrm count from", "# #NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE", "INV RECORDS # #ELSE IF COUNT >= 100 --> NEED TO ITERATE THRU", "NO NEW INV RECORDS # #MUST USE LATEST UNIQUE JSON FILE FOR INV", "gateway request # newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) # #get the new", "and count to first one above # nexttransactidstr = transactidstr # nextinvalrmcount =", "the Request to Gateway # soapResponse = g.gateway_request(soapreqs.get_org_soap()) # soapResponse = g.gateway_request(soapreqs.get_loc_soap()) #", "while newinvalrmcount == 100: # time.sleep(3) # #replaces step3 # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr)))", "FILE FOR INV RECORDS # #ELSE IF COUNT >= 100 --> NEED TO", "nextinvalrmcount = invalrmcount # # while more to get, set new transactid to", "NEW Inv Count: ' + str(newinvalrmcount)) # nexttransactidstr = newtransactidstr #updates nexttransactidstr #", "this first item # p = gateway.Process() # thecount = p.count_inventorycalcalrm() # transactidstr", "step4 # newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var", "= transactidstr # nextinvalrmcount = invalrmcount # # while more to get, set", "latest inv and save file # 
g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 - for each tank", "# tanklist = p.get_tank_list() #gives list of tank ids - THIS IS AN", "TO GET LATEST INV RECORDS - THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM", "BASICALLY THIS MEANS NEED TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json # #FILE TO THE LATEST", "tanklist: #display latest inventory for each tank in list # latestinvidstr = p.get_latestinvid_bytank(str(item))", "LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY LATEST IF PRESENT AT END! # print('more", "inventory id for the tank # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently", "uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4 - Now parse the unique", "# soapResponse = g.gateway_request(soapreqs.get_inv_soap()) # soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr = '10203647' #", "5- Repeat as neccessary until count < 100 to get the latest inventory", "g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created unique json for TransactionID ' + nexttransactidstr)", "than 100') # #save as latest inv json file # g.save_resp_unique_json(firstresponse, '_latest') #", "--> NO NEW INV RECORDS # #MUST USE LATEST UNIQUE JSON FILE FOR", "'_latest') # else: # print('Less than 100') # #save as latest inv json", "transactidstr) # transactidstr = p.get_inventorycalcalrm_transactID() # # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: '", "save file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 - for each tank in tanklist get", "works the same # # Step2 - Process the json file to get", "and save file # print('writing parsed inventory 
data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) #", "for item in tanklist: #for each unique tank, create a unique json file", "pprint.pprint(d) # print(d['ONE']) # print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank']", "# Step1 - make request using simple inventory soap (ie. zero as ACK", "# if len(str(newuniquedictresponse)) > 0: # g.save_resp_unique_json(newuniquedictresponse, 'latest') # else: # print('less than", "the tank # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has gross vol", "# time.sleep(1) # print('retrieved tanks...') # #step2 - build tank list from file", "no records, if so delete it # if len(nexttolastidstr) > 0 and newinvalrmcount", "test # testinvtransactid = '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) # print('new", "in Process to perform an alarm bits lookup to decode the actual alarm", "json file # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4 - Now", "first one above # nexttransactidstr = transactidstr # nextinvalrmcount = invalrmcount # #", "# for item in tanklist: #display latest inventory for each tank in list", "# newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var #", "TO ITERATE THRU TO GET LATEST # #MUST MAKE SURE YOU SAVE EACH", "# print(' NEW Inv Count: ' + str(invalrmcount)) # #determine inv count -", "# #step3 - get latest inv and save file # 
g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4", "# print('TankID: ' + str(item) + ' currently has gross vol ' +", "make request using simple inventory soap (ie. zero as ACK code), parse response", "+ str(newinvalrmcount)) # #update nexttransactid and nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount", "transactid - IMPORTANT: THIS WILL GIVE AN EMPTY NEXT REPONSE # # uniquedictresponse", "1 # tanklist = p.get_tank_list() #gives list of tank ids # print('TankIDs: '", "while more to get, set new transactid to that from latest unique json", "CALL THE WEB SERVICE WITH TRANSACTID, YOU CANNOT GET IT AGAIN! # #ELSE", "= d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime = '' # for item in invalrmlist: # if", "g.gateway_request(soapreqs.get_org_soap()) # soapResponse = g.gateway_request(soapreqs.get_loc_soap()) # soapResponse = g.gateway_request(soapreqs.get_tank_soap()) # soapResponse = g.gateway_request(soapreqs.get_inv_soap())", "make a second gateway req using the TransactionID to create unique json -", "100') # #save as latest inv json file # g.save_resp_unique_json(firstresponse, '_latest') # #", "p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank # print('TankID: ' +", "# #replaces step3 # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created unique", "nexttransactidstr # # NEW TEST TO GET LATEST INV RECORDS - THIS PROCESS", "alarm data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay # print('zzzzz') # time.sleep(180) #sleep", "print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has gross vol of ' #", "status of ' # + alarmstatus + ' calc alarm bits') 
# #TODO:", "# print(latestinvstr) #test6 - nice working test! # tanklist = p.get_tank_list() #gives list", "List: ') # list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list # for k in list:", "= p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get and save unique json", "case last item has zero records # nexttolastidstr = nexttransactidstr # #break while", "print(item) #test3 # bothlist = p.get_tankinv_list() # for item in bothlist: # print(item)", "- request all tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) #", "#now, check if latest unique json has no records, if so delete it", "file # g = gateway.Gateway() # dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the same", "= p.get_tank_list() #gives list of tank ids - THIS IS AN IMPORTANT STEP", "AN EMPTY NEXT REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr)", "# g.save_resp_unique_json(firstresponse, transactidstr) # except: # logtxt = 'error' # return logtxt #", "' + str(item) + ' currently has gross vol ' + p.get_grossvol_byinvid(latestinvidstr) +", "file to the latest # g.save_resp_unique_json(newuniquedictresponse, '_latest') # else: # print('Less than 100')", "Inv Count: ' + str(invalrmcount)) # #set transactid and count to first one", "< 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename the unique", "# testinvtransactid = '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count:", "an alarm bits lookup to decode the actual alarm state 
# #RUN.PY TEST", "except: # logtxt = 'error' # return logtxt # # TEST 9 -", "tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3 - get latest inv and", "IMPORTANT: THIS WILL GIVE AN EMPTY NEXT REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr)))", "1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save the latest non-empty", "json # --------------------------------------------------------- # ''' EARLY TEST SCENARIOS ''' # --------------------------------------------------------- # #", "INVENTORY THAT SOULD ALREADY EXIST # print('Zero new inventory records, use the existing", "tankgenlatlonstr = '10203647' # soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse response # dresp", "# if k['iTankID']: # print('ID: ' + str(k['iTankID'])) # except KeyError: # pass", "#break while loop if count less than 100 # if nextinvalrmcount < 100:", "gateway.Gateway() # dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the same # # Step2 -", "gals') # #step5 - works now, similar to step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) #", "g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # # Everything depends", "# p = gateway.Process() #test1 # tanklist = p.get_tank_list() # for item in", "except KeyError: # pass # --------------------------------------------------------- # ''' REAL GATEWAY TEST SECTION '''", "file created in step 1 # tanklist = p.get_tank_list() #gives list of tank", "last id string in case last item has zero records # nexttolastidstr =", "gateway.Process() # #step1 - 
req all tanks and write to master tanks file", "soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr = '10203647' # soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # #", "+ item['iTankID'] + ' has alarm status ' + item['iCalcAlarmBits']) # f =", "IN GetInventoryCalcAlarmResponse.json, SAVE TO LATEST # if thecount <= 0: # #No new", "4 - fully working # g = gateway.Gateway() # p = gateway.Process() #", "item in tanklist: #display latest inventory for each tank in list # latestinvidstr", "gross vol ' + p.get_grossvol_byinvid(latestinvidstr) + ' gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) # # TEST", "SCENARIOS ''' # --------------------------------------------------------- # # invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime = ''", "time.sleep(1) # print('retrieved tanks...') # #step2 - build tank list from file created", "gateway import soapreqs import time from datetime import datetime #Imports currently used for", "the list in Tank value # print('Return code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank", "using the TransactionID to create unique json - first test # testinvtransactid =", "# pass # --------------------------------------------------------- # ''' REAL GATEWAY TEST SECTION ''' # ---------------------------------------------------------", "the TransactionID and Inv Calc Alarm count # p = gateway.Process() # transactidstr", "TransactionID and Inv Calc Alarm count # p = gateway.Process() # transactidstr =", "records, if so delete it # if len(nexttolastidstr) > 0 and newinvalrmcount <", "GIVES YOU LATEST UNIQUE INVCALCALARM # NOTE: THIS METHOD OF GETTING LATEST INVENTORY", "k['iTankID']: # print('ID: ' + str(k['iTankID'])) # except KeyError: # pass # ---------------------------------------------------------", "in step 1 # tanklist = p.get_tank_list() #gives list of 
tank ids #", "INV RECORDS - THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # #NOTE: THIS", "has gross vol of ' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals') # #step5", "p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) # #now, check if latest unique json has no records,", "TO CHECK FOR CHANGES VIA GATEWAY # # TODO: Switch print stmts to", "# while True: # #save next to last id string in case last", "# invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: ' + str(invalrmcount)) #", "= gateway.Process() # #step1 - req all tanks and write to master tanks", "g.parse_response(soapResponse) # print(dresp) # INV ALARM CALC TRANSACTIONID TESTS # # Step1 -", "GEN AND REQUEST TESTS # g = gateway.Gateway() # Make the Request to", "print('TransactID: ' + transactidstr) # print('Inventory count: ' + str(thecount)) # #IF COUNT", "p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank # print('Tank ' +", "' + str(newinvalrmcount)) # #Step 5- Repeat as neccessary until count < 100", "- parse and display the data # for item in tanklist: # latestinvidstr", "# invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime = '' # for item in invalrmlist:", "LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD ALREADY EXIST # print('Zero new inventory records, use", "to create unique json - first test # testinvtransactid = '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid)))", "tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item in tanklist: # print(item) #need to fix", "list # for k in list: # #print(type(k)) # #print(k) # for k,", "inventory records, use the existing latest') # elif thecount >= 100: # #ITERATE", "nexttransactid and nextinvalrmcount # nexttransactidstr = 
p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2)", "EMPTY NEXT REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) #", "if thecount <= 0: # #No new inv, Use latest unique - BASICALLY", "tanklist: # print(item) #need to fix # #Org example reading the list in", "# nexttransactidstr = newtransactidstr #updates nexttransactidstr # # NEW TEST TO GET LATEST", "for the tank # alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus != '0': #", "unique json - first test # testinvtransactid = '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount", "function that whose job is to basically create the latest inventory json file.", "--------------------------------------------------------- # # GATEWAY SOAP GEN AND REQUEST TESTS # g = gateway.Gateway()", "+ str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals') # #step5 - works now, similar to step", "each tank in list # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id", "to log statements # print('\\nWELCOME TO THE GATEWAY DEMO APP\\n--------------------------------') # g =", "# if nextinvalrmcount < 100: # break # print('fetching next...') # newtransactidstr =", "for item in invlist: # print(item) #test3 # bothlist = p.get_tankinv_list() # for", "currently has alarm status of ' # + alarmstatus + ' calc alarm", "# TEST 9 - modified test #8 for using latest inv above based", "' + item['iCalcAlarmBits']) # f = open('temp.json', 'w') # f.write(json.dumps(resp, sort_keys=True, indent=4)) #", "# g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay # print('zzzzz') # time.sleep(180) #sleep for 3mins, increase this", "nexttransactid and 
nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3)", "+ item['iCalcAlarmBits']) # f = open('temp.json', 'w') # f.write(json.dumps(resp, sort_keys=True, indent=4)) # for", "= g.gateway_request(soapreqs.get_loc_soap()) # soapResponse = g.gateway_request(soapreqs.get_tank_soap()) # soapResponse = g.gateway_request(soapreqs.get_inv_soap()) # soapResponse =", "than 100, have latest') # g.save_resp_unique_json(uniquedictresponse, 'latest') # PROCESSING TEST SECTION ONLY #", "GATEWAY SOAP GEN AND REQUEST TESTS # g = gateway.Gateway() # Make the", "for the tank # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has gross", "used for testing only # import pprint # import json # --------------------------------------------------------- #", "#print(k) # for k, v in k.items(): # if k == 'iOrganizationID': #", "reading the list in Tank value # print('Return code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) #", "if nextinvalrmcount < 100: # break # print('fetching next...') # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr)", "item in invalrmlist: # if item['sUTCInventoryTime']: # #datetime_object = datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y", "p.get_inventorycalcalrm_transactID() # print('TransactID: ' + transactidstr) # print('Inventory count: ' + str(thecount)) #", "#test3 # bothlist = p.get_tankinv_list() # for item in bothlist: # print(item) #test4", "== 'iOrganizationID': # print(k, v) # #print(v) # #Loc example reading the list", "TransactionID to create unique json file # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr)", "#finally, rename the unique inv json file to be the generic starting point", "# 
print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list # for k", "for k in d['soap:Body']: # print(k) # break # d = {'ONE':{'TWO':{'THREE':'some txt", "# if k['iLocationID']: # print('ID: ' + str(k['iLocationID']) + ' Name: ' +", "example reading the list in Tank value # print('Return code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode']))", "has zero records # nexttolastidstr = nexttransactidstr # #break while loop if count", "tank, create a unique json file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) #", "ALARM CALC TRANSACTIONID TESTS # # Step1 - make request using simple inventory", "RECORDS # #MUST USE LATEST UNIQUE JSON FILE FOR INV RECORDS # #ELSE", "# else: # print('Less than 100') # #save as latest inv json file", "' - wake up...') # #step1 - request all tanks and write to", "uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # transactidstr = p.get_inventorycalcalrm_transactID() # #", "build tank list from file created in step 1 # tanklist = p.get_tank_list()", "newtransactidstr) # #get the new inv alrm count from the newtransactidstr # newinvalrmcount", "value # print('Return code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List: ') # list", "= g.parse_response(soapResponse) # print(dresp) # INV ALARM CALC TRANSACTIONID TESTS # # Step1", "to get the new transaction id and count # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) #", "- build tank list from file created in step 1 # tanklist =", "p = gateway.Process() # #step1 - req all tanks and write to master", "# print('Less than 100') # #save as latest inv json file # 
g.save_resp_unique_json(firstresponse,", "# try: # if k['iTankID']: # print('ID: ' + str(k['iTankID'])) # except KeyError:", "#soapreqs.get_invalrm_transactid_soap('0') works the same # # Step2 - Process the json file to", "g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 - parse and display the data # for item in", "= d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item in tanklist: # print(item) #need to fix #", "a function that whose job is to basically create the latest inventory json", "p.count_inventorycalcalrm() # transactidstr = p.get_inventorycalcalrm_transactID() # print('TransactID: ' + transactidstr) # print('Inventory count:", "logtxt # # TEST 9 - modified test #8 for using latest inv", "to Gateway # soapResponse = g.gateway_request(soapreqs.get_org_soap()) # soapResponse = g.gateway_request(soapreqs.get_loc_soap()) # soapResponse =", "depends on count of this first item # p = gateway.Process() # thecount", "each tank in tanklist get latest inventory and display # #note: for this", "# import pprint # import json # --------------------------------------------------------- # ''' EARLY TEST SCENARIOS", "# for item in invlist: # print(item) #test3 # bothlist = p.get_tankinv_list() #", "TEST 9 - modified test #8 for using latest inv above based on", "= g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse response # dresp = g.parse_response(soapResponse) # print(dresp) #", "p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID: ' + newtransactidstr + '", "# print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr = p.get_latestinvid_bytank('10203647') #works! 
# print(latestinvstr) #test6 - nice", "# ''' REAL GATEWAY TEST SECTION ''' # --------------------------------------------------------- # # GATEWAY SOAP", "list in Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns", "g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay # print('zzzzz') # time.sleep(180) #sleep for 3mins, increase this later", "100 TANKS! # #TODO: Place thi ALL into a function that whose job", "to get the TransactionID and Inv Calc Alarm count # p = gateway.Process()", "newtransactidstr) # #get the next unique json from gateway request # newuniquedictresponse =", "if latest unique json has no records, if so delete it # if", "THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # #NOTE: THIS METHOD OF GETTING", "#step2 - build tank list from file created in step 1 # tanklist", "print('Less than 100') # #save as latest inv json file # g.save_resp_unique_json(firstresponse, '_latest')", "# newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID: ' + newtransactidstr) # #get the", "#FILE TO THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD ALREADY EXIST # print('Zero new", "tank # print('TankID: ' + str(item) + ' currently has gross vol '", "# for k in list: # #print(type(k)) # #print(k) # for k, v", "the new inv alarm count from the uniquedictresponse # invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) #", "- make a second gateway req using the TransactionID to create unique json", "# print(item) #test2 # invlist = p.get_inventory_list() # for item in invlist: #", "(ie. 
zero as ACK code), parse response and save json file # g", "the list in Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization']", "+ newtransactidstr) # #get the next unique json from gateway request # newuniquedictresponse", "' + str(k['iLocationID']) + ' Name: ' + str(k['sLocationName']) # + ' Address:", "#for each unique tank, create a unique json file for each tank #", "gateway.Gateway() # p = gateway.Process() # while True: # print(str(datetime.datetime.now()) + ' -", "Make the Request to Gateway # soapResponse = g.gateway_request(soapreqs.get_org_soap()) # soapResponse = g.gateway_request(soapreqs.get_loc_soap())", "' Name: ' + str(k['sLocationName']) # + ' Address: ' + str(k['sAddress1'])) #", "json # while True: # #save next to last id string in case", "' Address: ' + str(k['sAddress1'])) # except KeyError: # pass # #Tank example", "file # print('writing parsed inventory data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed", "latest inventory id for the tank # print('TankID: ' + str(item) + '", "# if item['iCalcAlarmBits'] != str(0): # # print('Tank ' + item['iTankID'] + '", "latest inventory id for the tank # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + '", "3mins, increase this later # def build_latest_inv_file(): # '''NEW TEST TO GET LATEST", "'0': # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has alarm status of", "file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed alarm data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) #", "#updates newinvalrmcount # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var # print('NEW 
TransactionID: ' +", "and save file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 - for each tank in tanklist", "it # if len(nexttolastidstr) > 0 and newinvalrmcount < 1: # deletresponsestr =", "if k == 'iOrganizationID': # print(k, v) # #print(v) # #Loc example reading", "reponse for the next transactid - IMPORTANT: THIS WILL GIVE AN EMPTY NEXT", "and nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) #", "json file to the latest # g.save_resp_unique_json(newuniquedictresponse, '_latest') # else: # print('Less than", "# tanklist = p.get_tank_list() #gives list of tank ids # print(tanklist) # for", "METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS THAN 100", "f.write(json.dumps(resp, sort_keys=True, indent=4)) # for k in d['soap:Body']: # print(k) # break #", "p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus != '0': # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + '", "uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created unique json for TransactionID '", "# invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr + ' Inv Count:", "Calc Alarm count # p = gateway.Process() # transactidstr = p.get_inventorycalcalrm_transactID() # invalrmcount", "# print(str(item['sUTCInventoryTime'])) # # if item['iCalcAlarmBits'] != str(0): # # print('Tank ' +", "str(invalrmcount)) # time.sleep(2) #wait 2 secs # #Step2.5 - make a second gateway", "in case last item has zero records # nexttolastidstr = nexttransactidstr # #break", "YOU SAVE EACH UNIQUE JSON! 
ONCE YOU CALL THE WEB SERVICE WITH TRANSACTID,", "get latest inv and save file # print('writing parsed inventory data to file...')", "= '' # newuniquedictresponse = [] # if invalrmcount == 100: # print('more", "' + str(thecount)) # #IF COUNT <= 0 --> NO NEW INV RECORDS", "unique inv json file to the latest # g.save_resp_unique_json(newuniquedictresponse, '_latest') # else: #", "g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4 - Now parse the unique json file to get", "TO THE GATEWAY DEMO APP\\n--------------------------------') # g = gateway.Gateway() # p = gateway.Process()", "= {'ONE':{'TWO':{'THREE':'some txt value'}}} # pprint.pprint(d) # print(d['ONE']) # print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) #", "IF PRESENT AT END! # print('more than 100, need to iterate to latest')", "ids # print('TankIDs: ' + str(tanklist)) # for item in tanklist: #for each", "second gateway request using the TransactionID to create unique json file # uniquedictresponse", "more to do # nexttolastidstr = '' # newuniquedictresponse = [] # if", "newinvalrmcount < 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename the", "from file created in step 1 # tanklist = p.get_tank_list() #gives list of", "return logtxt # # TEST 9 - modified test #8 for using latest", "print('zzzzz') # time.sleep(180) #sleep for 3mins, increase this later # def build_latest_inv_file(): #", "THE WEB SERVICE WITH TRANSACTID, YOU CANNOT GET IT AGAIN! 
# #ELSE YOU", "' calc alarm bits') # #TODO: Add function in Process to perform an", "print('TankID: ' + str(item) + ' currently has gross vol ' + p.get_grossvol_byinvid(latestinvidstr)", "print(k, v) # #print(v) # #Loc example reading the list in Location value", "#ITERATE TO GET THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY LATEST IF PRESENT", "TEST # # SETUP RUN TEST TO CHECK FOR CHANGES VIA GATEWAY #", "invalrmcount # # while more to get, set new transactid to that from", "#step7 - parse and display the data # for item in tanklist: #", "GIVE AN EMPTY NEXT REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse,", "inventory # nexttransactidstr = transactidstr # newinvalrmcount = invalrmcount # while newinvalrmcount ==", "BELOW!!!!!!! # #print(tanklist) # for item in tanklist: #for each unique tank, create", "#also get and save unique json reponse for the next transactid - IMPORTANT:", "'10203647' # soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse response # dresp = g.parse_response(soapResponse)", "for item in invalrmlist: # if item['sUTCInventoryTime']: # #datetime_object = datetime.strptime(str(item['sUTCInventoryTime']), '%m %d", "request all tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1)", "save unique json reponse # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get", "Repeat as neccessary until count < 100 to get the latest inventory #", "CHANGES VIA GATEWAY # # TODO: Switch print stmts to log statements #", "'%m %d %Y %I:%M:%S %p') # print(str(item['sUTCInventoryTime'])) # # if item['iCalcAlarmBits'] != str(0):", "# #step4 - for each tank in tanklist get latest 
inventory and display", "#replaces step3 # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created unique json", "# # while more to get, set new transactid to that from latest", "# newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count: ' + str(newinvalrmcount)) # #Step3 -", "= p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) # #now, check if latest unique json has no", "' + str(k['sAddress1'])) # except KeyError: # pass # #Tank example reading the", "p.get_inventory_list() # for item in invlist: # print(item) #test3 # bothlist = p.get_tankinv_list()", "newtransactidstr # newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW Inv Count: ' + str(newinvalrmcount))", "Parse response # dresp = g.parse_response(soapResponse) # print(dresp) # INV ALARM CALC TRANSACTIONID", "#sleep for 3mins, increase this later # def build_latest_inv_file(): # '''NEW TEST TO", "+ p.get_grossvol_byinvid(latestinvidstr) + ' gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) # # TEST 8 - full", "= p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: ' + str(invalrmcount)) # #set transactid", "# GATEWAY SOAP GEN AND REQUEST TESTS # g = gateway.Gateway() # Make", "steps 1 and 3 above - need tank and inv # for item", "= p.get_inventorycalcalrm_transactID() # # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) #", "# thecount = p.count_inventorycalcalrm() # transactidstr = p.get_inventorycalcalrm_transactID() # print('TransactID: ' + transactidstr)", "nexttransactidstr) # #replaces step4 # newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount # newtransactidstr =", "records, use the existing latest') # elif thecount >= 100: # #ITERATE TO", "time from datetime import 
datetime #Imports currently used for testing only # import", "# g.save_resp_unique_json(firstresponse, transactidstr) # transactidstr = p.get_inventorycalcalrm_transactID() # # invalrmcount = p.count_inventorycalcalrm() #", "the list in Location value # print('Return code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location", "REQUEST TESTS # g = gateway.Gateway() # Make the Request to Gateway #", "# g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename the unique inv json file to be the", "list of tank ids # print(tanklist) # for item in tanklist: #display latest", "True: # #save next to last id string in case last item has", "time.sleep(2) # #step2 - build tank list from file created in step 1", "= newtransactidstr #updates nexttransactidstr # # NEW TEST TO GET LATEST INV RECORDS", "+ p.get_tankname_bytankid_file(str(item)) + ' currently has alarm status of ' # + alarmstatus", "and newinvalrmcount < 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save", "to last id string in case last item has zero records # nexttolastidstr", "to latest') # #set transactid and count to first one above # nexttransactidstr", "= open('temp.json', 'w') # f.write(json.dumps(resp, sort_keys=True, indent=4)) # for k in d['soap:Body']: #", "+ ' NEW Inv Count: ' + str(newinvalrmcount)) # nexttransactidstr = newtransactidstr #updates", "records # nexttolastidstr = nexttransactidstr # #break while loop if count less than", "< 100 to get the latest inventory # nexttransactidstr = transactidstr # newinvalrmcount", "'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save the latest non-empty unique inv json file", "count: ' + str(thecount)) # #IF COUNT <= 0 --> NO NEW INV", "item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get 
the latest inventory id for", "1 # tanklist = p.get_tank_list() #gives list of tank ids - THIS IS", "from gateway request # newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) # #get the", "+ ' currently has alarm status of ' # + alarmstatus + '", "each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3 - get latest inv", "# #ELSE IF COUNT >= 100 --> NEED TO ITERATE THRU TO GET", "while loop if count less than 100 # if nextinvalrmcount < 100: #", "new inv alrm count from the newtransactidstr # newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) # print('", "# + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals') # #step5 - works now, similar to", "soapreqs import time from datetime import datetime #Imports currently used for testing only", "+ ' currently has gross vol of ' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + '", "increase this later # def build_latest_inv_file(): # '''NEW TEST TO GET LATEST INV", "g.gateway_request(soapreqs.get_inv_soap()) # soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr = '10203647' # soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr))", "unique - BASICALLY THIS MEANS NEED TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json # #FILE TO", "tanklist: #for each unique tank, create a unique json file for each tank", "to create unique json file # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) #", "print('Inventory count: ' + str(thecount)) # #IF COUNT <= 0 --> NO NEW", "all tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # 
time.sleep(1) #", "in tanklist: # print(item) #need to fix # #Org example reading the list", "# #set transactid and count to first one above # nexttransactidstr = transactidstr", "# #ELSE YOU HAVE THE LATEST INV IN GetInventoryCalcAlarmResponse.json, SAVE TO LATEST #", "example reading the list in Location value # print('Return code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode']))", "reading the list in Location value # print('Return code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) #", "than 100, nothing more to do # nexttolastidstr = '' # newuniquedictresponse =", "print(dresp) # INV ALARM CALC TRANSACTIONID TESTS # # Step1 - make request", "tank, create a unique file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1)", "print('\\nWELCOME TO THE GATEWAY DEMO APP\\n--------------------------------') # g = gateway.Gateway() # p =", "# #datetime_object = datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y %I:%M:%S %p') # print(str(item['sUTCInventoryTime'])) # #", "PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # #NOTE: THIS METHOD OF GETTING LATEST", "100, have latest') # g.save_resp_unique_json(uniquedictresponse, 'latest') # PROCESSING TEST SECTION ONLY # p", "the newtransactidstr # newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW Inv Count: ' +", "the generic starting point GetInventoryCalcAlarmResponselatest json file! 
# if len(str(newuniquedictresponse)) > 0: #", "APP\\n--------------------------------') # g = gateway.Gateway() # p = gateway.Process() # while True: #", "#finally, save the latest non-empty unique inv json file to the latest #", "#update nexttransactid and nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #", "#get the new inv alarm count from the uniquedictresponse # invalrmcount = p.count_inventorycalcalrm_unique(transactidstr)", "less than 100 # if nextinvalrmcount < 100: # break # print('fetching next...')", "alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus != '0': # print('Tank ' + p.get_tankname_bytankid_file(str(item))", "alarmstatus != '0': # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has alarm", "the json file to get the TransactionID and Inv Calc Alarm count #", "tanklist = p.get_tank_list() #gives list of tank ids # print(tanklist) # for item", "TESTS # g = gateway.Gateway() # Make the Request to Gateway # soapResponse", "Inv Count: ' + str(invalrmcount)) # #determine inv count - if less than", "# #Step 5- Repeat as neccessary until count < 100 to get the", "soap (ie. zero as ACK code), parse response and save json file #", "item in invlist: # print(item) #test3 # bothlist = p.get_tankinv_list() # for item", "LATEST UNIQUE INVCALCALARM # #NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS", "count # g = gateway.Gateway() # p = gateway.Process() # #step1 - req", "TESTS # # Step1 - make request using simple inventory soap (ie. 
zero", "the latest inventory id for the tank # print('TankID: ' + str(item) +", "lookup to decode the actual alarm state # #RUN.PY TEST # # SETUP", "RUN TEST TO CHECK FOR CHANGES VIA GATEWAY # # TODO: Switch print", "ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS!''' # try: # logtxt", "as ACK code), parse response and save json file # g = gateway.Gateway()", "Gateway # soapResponse = g.gateway_request(soapreqs.get_org_soap()) # soapResponse = g.gateway_request(soapreqs.get_loc_soap()) # soapResponse = g.gateway_request(soapreqs.get_tank_soap())", "print('ID: ' + str(k['iLocationID']) + ' Name: ' + str(k['sLocationName']) # + '", "- for each tank in tanklist get latest inventory and display # #note:", "# #No new inv, Use latest unique - BASICALLY THIS MEANS NEED TO", "# print('retrieved tanks...') # #step2 - build tank list from file created in", "# break # d = {'ONE':{'TWO':{'THREE':'some txt value'}}} # pprint.pprint(d) # print(d['ONE']) #", "= g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # # Everything depends on count of this first", "= p.get_tank_list() # for item in tanklist: # print(item) #test2 # invlist =", "break # d = {'ONE':{'TWO':{'THREE':'some txt value'}}} # pprint.pprint(d) # print(d['ONE']) # print(d['ONE']['TWO'])", "# nexttransactidstr = transactidstr # newinvalrmcount = invalrmcount # while newinvalrmcount == 100:", "TransactionID to create unique json - first test # testinvtransactid = '47174434' #", "TransactionID: ' + newtransactidstr) # #get the next unique json from gateway request", "< 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save the latest", "file! # if len(str(newuniquedictresponse)) > 0: # g.save_resp_unique_json(newuniquedictresponse, 'latest') # else: # print('less", "Step1 - make request using simple inventory soap (ie. 
zero as ACK code),", "# else: # print('less than 100, have latest') # g.save_resp_unique_json(uniquedictresponse, 'latest') # PROCESSING", "# #IF COUNT <= 0 --> NO NEW INV RECORDS # #MUST USE", "time.sleep(2) # #now, check if latest unique json has no records, if so", "# # #also get and save unique json reponse for the next transactid", "THAN 100 TANKS!''' # try: # logtxt = '' # g = gateway.Gateway()", "# # Step2 - Process the json file to get the TransactionID and", "item in tanklist: # print(item) #need to fix # #Org example reading the", "and count # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID:", "GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY LATEST IF PRESENT AT END! # print('more than 100,", "g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) # #get the new inv alrm count from the", "code), parse response and save json file # g = gateway.Gateway() # dictresponse", "and save json file # g = gateway.Gateway() # dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0')", "json file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3 -", "2 secs # #Step2.5 - make a second gateway req using the TransactionID", "LATEST UNIQUE INVCALCALARM # NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS", "#datetime_object = datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y %I:%M:%S %p') # print(str(item['sUTCInventoryTime'])) # # if", "IF COUNT >= 100 --> NEED TO ITERATE THRU TO GET LATEST #", "= g.gateway_request(soapreqs.get_tank_soap()) # soapResponse = g.gateway_request(soapreqs.get_inv_soap()) # soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) # 
tankgenlatlonstr =", "[] # if invalrmcount == 100: # print('more than 100, need to iterate", "p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count: ' + str(newinvalrmcount)) # #Step3 - make a second", "# print('writing parsed alarm data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay # print('zzzzz')", "invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime = '' # for item in invalrmlist: #", "print(k) # break # d = {'ONE':{'TWO':{'THREE':'some txt value'}}} # pprint.pprint(d) # print(d['ONE'])", "nexttransactidstr = transactidstr # nextinvalrmcount = invalrmcount # # while more to get,", "# if len(nexttolastidstr) > 0 and newinvalrmcount < 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json'", "the tank # print('TankID: ' + str(item) + ' currently has gross vol", "# for k, v in k.items(): # if k == 'iOrganizationID': # print(k,", "and display the data # for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item))", "file to get the new transaction id and count # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr)", "in list: # #print(type(k)) # #print(k) # for k, v in k.items(): #", "# print('Return code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List: ') # list =", "inv, Use latest unique - BASICALLY THIS MEANS NEED TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json", "in Location value # print('Return code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List: ')", "# #FILE TO THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD ALREADY EXIST # print('Zero", "'latest') # PROCESSING TEST SECTION ONLY # p = gateway.Process() #test1 # tanklist", "reponse # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # 
g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get the new inv", "%Y %I:%M:%S %p') # print(str(item['sUTCInventoryTime'])) # # if item['iCalcAlarmBits'] != str(0): # #", "# g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save the latest non-empty unique inv json file to", "'_latest') # # #also get and save unique json reponse for the next", "+ str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List: ') # list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list #", "LATEST INV RECORDS - THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # #NOTE:", "get latest inv and save file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 - for each", "tank and inv # for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get", "one above # #nextinvalrmcount = invalrmcount # nextinvalrmcount = thecount # nexttransactidstr =", "time.sleep(3) # #replaces step3 # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created", "than 100, need to iterate to latest') # #set transactid and count to", "above - need tank and inv # for item in tanklist: # latestinvidstr", "#MUST MAKE SURE YOU SAVE EACH UNIQUE JSON! ONCE YOU CALL THE WEB", "# dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the same # # Step2 - Process", "whose job is to basically create the latest inventory json file. 
# g", "- need tank and inv # for item in tanklist: # latestinvidstr =", "p = gateway.Process() # thecount = p.count_inventorycalcalrm() # transactidstr = p.get_inventorycalcalrm_transactID() # print('TransactID:", "for each tank in tanklist get latest inventory and display # #note: for", "alarm count from the uniquedictresponse # invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv", "p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank # alarmstatus = p.get_tankalrm_byinvid(latestinvidstr)", "so delete it # if len(nexttolastidstr) > 0 and newinvalrmcount < 1: #", "THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD ALREADY EXIST # print('Zero new inventory records,", "Count: ' + str(newinvalrmcount)) # #update nexttransactid and nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr)", "+ ' gals') # #step5 - works now, similar to step 4 #", "'' # g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # #", "test working thru step 4 - fully working # g = gateway.Gateway() #", "#works! # print(latestinvstr) #test6 - nice working test! 
# tanklist = p.get_tank_list() #gives", "# #invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get and save", "print('more than 100, need to iterate to latest') # #set transactid and count", "latest non-empty unique inv json file to the latest # g.save_resp_unique_json(newuniquedictresponse, '_latest') #", "# newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) # #get the new inv alrm", "value # print('Return code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List: ') # list", "# newuniquedictresponse = [] # if invalrmcount == 100: # print('more than 100,", "# # g.save_resp_unique_json(firstresponse, transactidstr) # transactidstr = p.get_inventorycalcalrm_transactID() # # invalrmcount = p.count_inventorycalcalrm()", "the latest # g.save_resp_unique_json(newuniquedictresponse, '_latest') # else: # print('Less than 100') # #save", "= invalrmcount # nextinvalrmcount = thecount # nexttransactidstr = transactidstr # # while", "next...') # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID: ' + newtransactidstr) # #get", "basically create the latest inventory json file. 
# g = gateway.Gateway() # firstresponse", "# print('Created unique json for TransactionID ' + nexttransactidstr) # #replaces step4 #", "# for item in tanklist: #for each unique tank, create a unique file", "code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List: ') # list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns", "+ str(invalrmcount)) # time.sleep(2) #wait 2 secs # #Step2.5 - make a second", "# #break while loop if count less than 100 # if nextinvalrmcount <", "existing latest') # elif thecount >= 100: # #ITERATE TO GET THE LATEST", "+ ' has alarm status ' + item['iCalcAlarmBits']) # f = open('temp.json', 'w')", "MEANS NEED TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json # #FILE TO THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY", "+ str(k['sAddress1'])) # except KeyError: # pass # #Tank example reading the list", "count: ' + str(newinvalrmcount)) # #Step3 - make a second gateway request using", "' + nexttransactidstr) # #replaces step4 # newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount #", "import json # --------------------------------------------------------- # ''' EARLY TEST SCENARIOS ''' # --------------------------------------------------------- #", "# list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list # for k in list: # try:", "#wait 2 secs # #Step2.5 - make a second gateway req using the", "gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) # # TEST 8 - full test working thru step", "print(tanklist) # for item in tanklist: #display latest inventory for each tank in", "THIS MEANS NEED TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json # #FILE TO THE LATEST GetInventoryCalcAlarmResponse_latest.json", "GET LATEST INV RECORDS - THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM #", "nextinvalrmcount < 100: # break # print('fetching next...') # newtransactidstr = 
p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #", "g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) # #get the new inv alrm count from the newtransactidstr #", "' + transactidstr + ' Inv Count: ' + str(invalrmcount)) # time.sleep(2) #wait", "= g.gateway_request(soapreqs.get_org_soap()) # soapResponse = g.gateway_request(soapreqs.get_loc_soap()) # soapResponse = g.gateway_request(soapreqs.get_tank_soap()) # soapResponse =", "# uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # except: # logtxt =", "# print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list # for k in list:", "# TEST 8 - full test working thru step 4 - fully working", "YOU HAVE THE LATEST INV IN GetInventoryCalcAlarmResponse.json, SAVE TO LATEST # if thecount", "this later # def build_latest_inv_file(): # '''NEW TEST TO GET LATEST INV RECORDS", "to do # nexttolastidstr = '' # newuniquedictresponse = [] # if invalrmcount", "#set transactid and count to first one above # #nextinvalrmcount = invalrmcount #", "unique tank, create a unique json file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item)", "- make a second gateway request using the TransactionID to create unique json", "try: # if k['iLocationID']: # print('ID: ' + str(k['iLocationID']) + ' Name: '", "fix # #Org example reading the list in Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) #", "# pass # #Tank example reading the list in Tank value # print('Return", "print('Return code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List: ') # list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank']", "# #save as latest inv json file # 
g.save_resp_unique_json(firstresponse, '_latest') # # #also", "AND REQUEST TESTS # g = gateway.Gateway() # Make the Request to Gateway", "inventorytime = '' # for item in invalrmlist: # if item['sUTCInventoryTime']: # #datetime_object", "# nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) # #now, check", "100: # time.sleep(3) # #replaces step3 # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr)", "#nextinvalrmcount = invalrmcount # nextinvalrmcount = thecount # nexttransactidstr = transactidstr # #", "and count to first one above # #nextinvalrmcount = invalrmcount # nextinvalrmcount =", "firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # # Everything depends on count of this", "Everything depends on count of this first item # p = gateway.Process() #", "g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # transactidstr = p.get_inventorycalcalrm_transactID() # # invalrmcount =", "latest inventory id for the tank # alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus", "#set transactid and count to first one above # nexttransactidstr = transactidstr #", "rename the unique inv json file to be the generic starting point GetInventoryCalcAlarmResponselatest", "print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item in", "+ str(k['iTankID'])) # except KeyError: # pass # --------------------------------------------------------- # ''' REAL GATEWAY", "= gateway.Gateway() # Make the Request to 
Gateway # soapResponse = g.gateway_request(soapreqs.get_org_soap()) #", "# --------------------------------------------------------- # ''' REAL GATEWAY TEST SECTION ''' # --------------------------------------------------------- # #", "tank # alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus != '0': # print('Tank '", "# #nextinvalrmcount = invalrmcount # nextinvalrmcount = thecount # nexttransactidstr = transactidstr #", "g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created unique json for TransactionID ' + nexttransactidstr) # #replaces", "time.sleep(3) # #now, check if latest unique json has no records, if so", "unique json from gateway request # newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) #", "tanks...') # #step2 - build tank list from file created in step 1", "print(item) #need to fix # #Org example reading the list in Organization value", "TransactionID: ' + nexttransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount)) #", "invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get and save unique", "to get the latest inventory # nexttransactidstr = transactidstr # newinvalrmcount = invalrmcount", "# f = open('temp.json', 'w') # f.write(json.dumps(resp, sort_keys=True, indent=4)) # for k in", "'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename the unique inv json file to be", "# TODO: Switch print stmts to log statements # print('\\nWELCOME TO THE GATEWAY", "== 100: # print('more than 100, need to iterate to latest') # #set", "- nice working test! 
# tanklist = p.get_tank_list() #gives list of tank ids", "0 --> NO NEW INV RECORDS # #MUST USE LATEST UNIQUE JSON FILE", "newinvalrmcount # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var # print('NEW TransactionID: ' + nexttransactidstr", "' Inv Count: ' + str(invalrmcount)) # time.sleep(2) #wait 2 secs # #Step2.5", "USE LATEST UNIQUE JSON FILE FOR INV RECORDS # #ELSE IF COUNT >=", "' gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) # # TEST 8 - full test working thru", "tank in list # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id for", "def build_latest_inv_file(): # '''NEW TEST TO GET LATEST INV RECORDS - THIS PROCESS", "for testing only # import pprint # import json # --------------------------------------------------------- # '''", "# if invalrmcount == 100: # print('more than 100, need to iterate to", "this to work, you must have already done steps 1 and 3 above", "parsed alarm data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay # print('zzzzz') # time.sleep(180)", "%I:%M:%S %p') # print(str(item['sUTCInventoryTime'])) # # if item['iCalcAlarmBits'] != str(0): # # print('Tank", "{'ONE':{'TWO':{'THREE':'some txt value'}}} # pprint.pprint(d) # print(d['ONE']) # print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode'])", "to get, set new transactid to that from latest unique json # while", "# g = gateway.Gateway() # dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the same #", "pprint # import json # --------------------------------------------------------- # ''' EARLY TEST SCENARIOS ''' #", "k, v in k.items(): # if k == 'iOrganizationID': # print(k, v) #", "response # dresp = g.parse_response(soapResponse) # print(dresp) # INV ALARM CALC TRANSACTIONID TESTS", 
"- get latest inv and save file # print('writing parsed inventory data to", "# Step2 - Process the json file to get the TransactionID and Inv", "str(tanklist)) # for item in tanklist: #for each unique tank, create a unique", "of ' # + alarmstatus + ' calc alarm bits') # #TODO: Add", "' + transactidstr) # print('Inventory count: ' + str(thecount)) # #IF COUNT <=", "TRANSACTID, YOU CANNOT GET IT AGAIN! # #ELSE YOU HAVE THE LATEST INV", "#ELSE YOU HAVE THE LATEST INV IN GetInventoryCalcAlarmResponse.json, SAVE TO LATEST # if", "testinvtransactid = '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count: '", "SECTION ONLY # p = gateway.Process() #test1 # tanklist = p.get_tank_list() # for", "gateway request using the TransactionID to create unique json file # uniquedictresponse =", "invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: ' + str(invalrmcount)) # #determine", "#print(str(p.get_tankname_bytankid('10203647'))) # # TEST 8 - full test working thru step 4 -", "# #finally, save the latest non-empty unique inv json file to the latest", "ids # print(tanklist) # for item in tanklist: #display latest inventory for each", "EXIST # print('Zero new inventory records, use the existing latest') # elif thecount", "# # invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime = '' # for item in", "json file! 
# if len(str(newuniquedictresponse)) > 0: # g.save_resp_unique_json(newuniquedictresponse, 'latest') # else: #", "thecount = p.count_inventorycalcalrm() # transactidstr = p.get_inventorycalcalrm_transactID() # print('TransactID: ' + transactidstr) #", "# logtxt = 'error' # return logtxt # # TEST 9 - modified", "+ transactidstr) # #get and save unique json reponse # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr)))", "' + p.get_tankname_bytankid_file(str(item)) + ' currently has alarm status of ' # +", "the latest inventory id for the tank # print('Tank ' + p.get_tankname_bytankid_file(str(item)) +", "import gateway import soapreqs import time from datetime import datetime #Imports currently used", "#get and save unique json reponse # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr)", "to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1) # print('retrieved tanks...') # #step2", "g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4 - Now parse the unique json file", "TEST 8 - full test working thru step 4 - fully working #", "tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank", "first one above # #nextinvalrmcount = invalrmcount # nextinvalrmcount = thecount # nexttransactidstr", "you must have already done steps 1 and 3 above - need tank", "THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS THAN", "id and count # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('NEW", "create a unique json file for each tank # 
g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1)", "UNIQUE JSON! ONCE YOU CALL THE WEB SERVICE WITH TRANSACTID, YOU CANNOT GET", "AN IMPORTANT STEP FOR SEVERAL ITEMS BELOW!!!!!!! # #print(tanklist) # for item in", "nextinvalrmcount = thecount # nexttransactidstr = transactidstr # # while more to get,", "list: # #print(type(k)) # #print(k) # for k, v in k.items(): # if", "# print('more than 100, need to iterate to latest') # #set transactid and", "transactidstr) # print('Inventory count: ' + str(thecount)) # #IF COUNT <= 0 -->", "next transactid - IMPORTANT: THIS WILL GIVE AN EMPTY NEXT REPONSE # #", "100 TANKS!''' # try: # logtxt = '' # g = gateway.Gateway() #", "#Tank example reading the list in Tank value # print('Return code: ' +", "- get latest inv and save file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 - for", "newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) # #get the new inv alrm count", "value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list # for", "the actual alarm state # #RUN.PY TEST # # SETUP RUN TEST TO", "json file # g = gateway.Gateway() # dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the", "#test7 #print(str(p.get_tankname_bytankid('10203647'))) # # TEST 8 - full test working thru step 4", "# print('NEW TransactionID: ' + newtransactidstr + ' NEW Inv Count: ' +", "tanklist = p.get_tank_list() #gives list of tank ids # print('TankIDs: ' + str(tanklist))", "in list # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest 
inventory id for the", "#delay # print('zzzzz') # time.sleep(180) #sleep for 3mins, increase this later # def", "json file # g.save_resp_unique_json(firstresponse, '_latest') # # #also get and save unique json", "p = gateway.Process() # while True: # print(str(datetime.datetime.now()) + ' - wake up...')", "g = gateway.Gateway() # p = gateway.Process() # #step1 - req all tanks", "print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item in tanklist: #", "= p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID: ' + newtransactidstr) # #get the next unique", "for item in tanklist: # print(item) #need to fix # #Org example reading", "print('less than 100, have latest') # g.save_resp_unique_json(uniquedictresponse, 'latest') # PROCESSING TEST SECTION ONLY", "to step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 - parse and display the data", "#temp var # print('NEW TransactionID: ' + nexttransactidstr + ' NEW Inv Count:", "above # #nextinvalrmcount = invalrmcount # nextinvalrmcount = thecount # nexttransactidstr = transactidstr", "for 3mins, increase this later # def build_latest_inv_file(): # '''NEW TEST TO GET", "''' # --------------------------------------------------------- # # GATEWAY SOAP GEN AND REQUEST TESTS # g", "' currently has alarm status of ' # + alarmstatus + ' calc", "state # #RUN.PY TEST # # SETUP RUN TEST TO CHECK FOR CHANGES", "= 'error' # return logtxt # # TEST 9 - modified test #8", "str(thecount)) # #IF COUNT <= 0 --> NO NEW INV RECORDS # #MUST", "tanklist = p.get_tank_list() # for item in tanklist: # print(item) #test2 # invlist", "str(newinvalrmcount)) # nexttransactidstr = newtransactidstr #updates nexttransactidstr # # NEW TEST TO GET", "alarm status ' + item['iCalcAlarmBits']) # f = open('temp.json', 'w') # 
f.write(json.dumps(resp, sort_keys=True,", "GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS!''' #", "uniquedictresponse # invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: ' + str(invalrmcount))", "# for k in list: # try: # if k['iTankID']: # print('ID: '", "print('TransactionID: ' + transactidstr) # #get and save unique json reponse # uniquedictresponse", "# time.sleep(2) # #step2 - build tank list from file created in step", "ONCE YOU CALL THE WEB SERVICE WITH TRANSACTID, YOU CANNOT GET IT AGAIN!", "last item has zero records # nexttolastidstr = nexttransactidstr # #break while loop", "newuniquedictresponse = [] # if invalrmcount == 100: # print('more than 100, need", "latest inventory and display # #note: for this to work, you must have", "= p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr + ' Inv Count: ' +", "than 100, need to iterate to latest') # #transactidstr = p.get_inventorycalcalrm_transactID() # #invalrmcount", "done steps 1 and 3 above - need tank and inv # for", "NEXT REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # except:", "k.items(): # if k == 'iOrganizationID': # print(k, v) # #print(v) # #Loc", "invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: ' + str(invalrmcount)) # #set", "# #finally, rename the unique inv json file to be the generic starting", "#test5 # latestinvstr = p.get_latestinvid_bytank('10203647') #works! 
# print(latestinvstr) #test6 - nice working test!", "= d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list # for k in list: # try: # if", "' + newtransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount)) # #Step", "= gateway.Process() #test1 # tanklist = p.get_tank_list() # for item in tanklist: #", "= g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # except: # logtxt = 'error' #", "#Loc example reading the list in Location value # print('Return code: ' +", "+ str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List: ') # list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list #", "+ ' - wake up...') # #step1 - request all tanks and write", "working thru step 4 - fully working # g = gateway.Gateway() # p", "time.sleep(1) # #step3 - get latest inv and save file # print('writing parsed", "''' # --------------------------------------------------------- # # invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime = '' #", "# g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4 - Now parse the unique json file to", "soapResponse = g.gateway_request(soapreqs.get_org_soap()) # soapResponse = g.gateway_request(soapreqs.get_loc_soap()) # soapResponse = g.gateway_request(soapreqs.get_tank_soap()) # soapResponse", "1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename the unique inv", "IF YOU HAVE LESS THAN 100 TANKS!''' # try: # logtxt = ''", "SAVE TO LATEST # if thecount <= 0: # #No new inv, Use", "list in Location value # print('Return code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List:", "in invlist: # print(item) #test3 # bothlist = p.get_tankinv_list() # for item in", 
"latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank # print('TankID:", "p.get_inventorycalcalrm_transactID() # # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get", "= nexttransactidstr # #break while loop if count less than 100 # if", "to latest') # #transactidstr = p.get_inventorycalcalrm_transactID() # #invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: '", ">= 100 --> NEED TO ITERATE THRU TO GET LATEST # #MUST MAKE", "HAVE LESS THAN 100 TANKS! # #TODO: Place thi ALL into a function", "inventory and display # #note: for this to work, you must have already", "# if alarmstatus != '0': # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently", "works now, similar to step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 - parse and", "p.get_inventorycalcalrm_transactID() # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr + ' Inv", "Use latest unique - BASICALLY THIS MEANS NEED TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json #", "'latest') # else: # print('less than 100, have latest') # g.save_resp_unique_json(uniquedictresponse, 'latest') #", "make a second gateway request using the TransactionID to create unique json file", "unique json for TransactionID ' + nexttransactidstr) # #replaces step4 # newinvalrmcount =", "item # p = gateway.Process() # thecount = p.count_inventorycalcalrm() # transactidstr = p.get_inventorycalcalrm_transactID()", "GIVES YOU LATEST UNIQUE INVCALCALARM # #NOTE: THIS METHOD OF GETTING LATEST INVENTORY", "nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) # #now, check if latest unique json has", "tank ids # print(tanklist) # for item in tanklist: #display latest inventory for", "request using simple inventory soap (ie. 
zero as ACK code), parse response and", "nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) # #now,", "using simple inventory soap (ie. zero as ACK code), parse response and save", "#invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get and save unique", "print('NEW TransactionID: ' + newtransactidstr) # #get the next unique json from gateway", "- IMPORTANT: THIS WILL GIVE AN EMPTY NEXT REPONSE # # uniquedictresponse =", "+ nexttransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount)) # nexttransactidstr =", "# # NEW TEST TO GET LATEST INV RECORDS - THIS PROCESS GIVES", "to be the generic starting point GetInventoryCalcAlarmResponselatest json file! # if len(str(newuniquedictresponse)) >", "if len(str(newuniquedictresponse)) > 0: # g.save_resp_unique_json(newuniquedictresponse, 'latest') # else: # print('less than 100,", "!= '0': # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has alarm status", "TEST SECTION ''' # --------------------------------------------------------- # # GATEWAY SOAP GEN AND REQUEST TESTS", "g.gateway_request(soapreqs.get_tank_soap()) # soapResponse = g.gateway_request(soapreqs.get_inv_soap()) # soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr = '10203647'", "d = {'ONE':{'TWO':{'THREE':'some txt value'}}} # pprint.pprint(d) # print(d['ONE']) # print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns'])", "in tanklist: #for each unique tank, create a unique json file for each", "write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1) # print('retrieved tanks...') #", "invlist: # print(item) #test3 # bothlist = p.get_tankinv_list() # for item in bothlist:", "d['soap:Body']: # print(k) # break # d = {'ONE':{'TWO':{'THREE':'some txt 
value'}}} # pprint.pprint(d)", "# transactidstr = p.get_inventorycalcalrm_transactID() # print('TransactID: ' + transactidstr) # print('Inventory count: '", "# while more to get, set new transactid to that from latest unique", "g.save_resp_unique_json(uniquedictresponse, 'latest') # PROCESSING TEST SECTION ONLY # p = gateway.Process() #test1 #", "transactidstr = p.get_inventorycalcalrm_transactID() # # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr)", "thecount # nexttransactidstr = transactidstr # # while more to get, set new", "parse and display the data # for item in tanklist: # latestinvidstr =", "g = gateway.Gateway() # Make the Request to Gateway # soapResponse = g.gateway_request(soapreqs.get_org_soap())", "p.get_tankname_bytankid_file(str(item)) + ' currently has alarm status of ' # + alarmstatus +", "# time.sleep(2) # #now, check if latest unique json has no records, if", "# newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID: ' + newtransactidstr + ' NEW", "latest inv json file # g.save_resp_unique_json(firstresponse, '_latest') # # #also get and save", "+ ' currently has gross vol ' + p.get_grossvol_byinvid(latestinvidstr) + ' gals') #test7", "in tanklist: #for each unique tank, create a unique file for each tank", "latest inventory json file. 
# g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #", "= p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) # #now, check if latest", "for k in list: # try: # if k['iLocationID']: # print('ID: ' +", "# newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID: ' +", "NEW Inv Count: ' + str(newinvalrmcount)) # #update nexttransactid and nextinvalrmcount # nexttransactidstr", "inv above based on count # g = gateway.Gateway() # p = gateway.Process()", "k in d['soap:Body']: # print(k) # break # d = {'ONE':{'TWO':{'THREE':'some txt value'}}}", "str(newinvalrmcount)) # #Step3 - make a second gateway request using the TransactionID to", "the latest inventory json file. # g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))", "# nexttransactidstr = transactidstr # nextinvalrmcount = invalrmcount # # while more to", "g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get the new inv alarm count from the uniquedictresponse #", "AT END! 
# print('more than 100, need to iterate to latest') # #transactidstr", "# p = gateway.Process() # #step1 - req all tanks and write to", "# for k in list: # try: # if k['iLocationID']: # print('ID: '", "soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse response # dresp = g.parse_response(soapResponse) # print(dresp)", "of tank ids # print('TankIDs: ' + str(tanklist)) # for item in tanklist:", "<= 0 --> NO NEW INV RECORDS # #MUST USE LATEST UNIQUE JSON", "#returns list # for k in list: # #print(type(k)) # #print(k) # for", "# list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list # for k in list: # try:", "file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 - for each tank in tanklist get latest", "Switch print stmts to log statements # print('\\nWELCOME TO THE GATEWAY DEMO APP\\n--------------------------------')", "--------------------------------------------------------- # ''' REAL GATEWAY TEST SECTION ''' # --------------------------------------------------------- # # GATEWAY", "#get the next unique json from gateway request # newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) #", "YOU HAVE LESS THAN 100 TANKS! 
# #TODO: Place thi ALL into a", "' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals') # #step5 - works now, similar", "# print(k) # break # d = {'ONE':{'TWO':{'THREE':'some txt value'}}} # pprint.pprint(d) #", "%d %Y %I:%M:%S %p') # print(str(item['sUTCInventoryTime'])) # # if item['iCalcAlarmBits'] != str(0): #", "--> NEED TO ITERATE THRU TO GET LATEST # #MUST MAKE SURE YOU", "' currently has gross vol ' + p.get_grossvol_byinvid(latestinvidstr) + ' gals') #test7 #print(str(p.get_tankname_bytankid('10203647')))", "to iterate to latest') # #set transactid and count to first one above", "+ ' Address: ' + str(k['sAddress1'])) # except KeyError: # pass # #Tank", "latest') # elif thecount >= 100: # #ITERATE TO GET THE LATEST INVENTORY", "transaction id and count # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) #", "g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse response # dresp = g.parse_response(soapResponse) # print(dresp) # INV", "item in tanklist: #for each unique tank, create a unique file for each", "FOR CHANGES VIA GATEWAY # # TODO: Switch print stmts to log statements", "and nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) #", "#step1 - request all tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap())))", "# elif thecount >= 100: # #ITERATE TO GET THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json;", "the unique json file to get the new transaction id and count #", "100: # break # print('fetching next...') # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID:", "# #print(tanklist) # for item in tanklist: #for each unique tank, create a", "latest') # 
g.save_resp_unique_json(uniquedictresponse, 'latest') # PROCESSING TEST SECTION ONLY # p = gateway.Process()", "# print('zzzzz') # time.sleep(180) #sleep for 3mins, increase this later # def build_latest_inv_file():", "#get the latest inventory id for the tank # print('TankID: ' + str(item)", "from the newtransactidstr # newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW Inv Count: '", "' + transactidstr) # #get and save unique json reponse # uniquedictresponse =", "to iterate to latest') # #transactidstr = p.get_inventorycalcalrm_transactID() # #invalrmcount = p.count_inventorycalcalrm() #", "#get the latest inventory id for the tank # alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) #", "DEL EMPTY LATEST IF PRESENT AT END! # print('more than 100, need to", "time.sleep(1) # #step3 - get latest inv and save file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) #", "+ ' Name: ' + str(k['sLocationName']) # + ' Address: ' + str(k['sAddress1']))", "request using the TransactionID to create unique json file # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr)))", "'47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count: ' + str(newinvalrmcount))", "Count: ' + str(invalrmcount)) # #determine inv count - if less than 100,", "first test # testinvtransactid = '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) #", "list of tank ids - THIS IS AN IMPORTANT STEP FOR SEVERAL ITEMS", "'w') # f.write(json.dumps(resp, sort_keys=True, indent=4)) # for k in d['soap:Body']: # print(k) #", "have already done steps 1 and 3 above - need tank and inv", "new inventory records, use the existing latest') 
# elif thecount >= 100: #", "p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) # #now, check if latest unique json has no records,", "GET IT AGAIN! # #ELSE YOU HAVE THE LATEST INV IN GetInventoryCalcAlarmResponse.json, SAVE", "UNIQUE INVCALCALARM # #NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF", "GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD ALREADY EXIST # print('Zero new inventory records, use the", "item in bothlist: # print(item) #test4 # print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr = p.get_latestinvid_bytank('10203647')", "INV RECORDS # #MUST USE LATEST UNIQUE JSON FILE FOR INV RECORDS #", "data # for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest", "tank in tanklist get latest inventory and display # #note: for this to", "''' EARLY TEST SCENARIOS ''' # --------------------------------------------------------- # # invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] #", "NEW INV RECORDS # #MUST USE LATEST UNIQUE JSON FILE FOR INV RECORDS", "need to iterate to latest') # #transactidstr = p.get_inventorycalcalrm_transactID() # #invalrmcount = p.count_inventorycalcalrm()", "') # list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list # for k in list: #", "above based on count # g = gateway.Gateway() # p = gateway.Process() #", "k in list: # try: # if k['iLocationID']: # print('ID: ' + str(k['iLocationID'])", "#step1 - req all tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap())))", "# tankgenlatlonstr = '10203647' # soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse response #", "# #get the next unique json from gateway request # newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr)))", "to the latest # 
g.save_resp_unique_json(newuniquedictresponse, '_latest') # else: # print('Less than 100') #", "TO THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD ALREADY EXIST # print('Zero new inventory", "#print(type(k)) # #print(k) # for k, v in k.items(): # if k ==", "# time.sleep(2) #wait 2 secs # #Step2.5 - make a second gateway req", "+ str(k['sLocationName']) # + ' Address: ' + str(k['sAddress1'])) # except KeyError: #", "- first test # testinvtransactid = '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid)", "GetInventoryCalcAlarmResponselatest json file! # if len(str(newuniquedictresponse)) > 0: # g.save_resp_unique_json(newuniquedictresponse, 'latest') # else:", "the uniquedictresponse # invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: ' +", "and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1) # print('retrieved tanks...')", "g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1) # print('retrieved tanks...') # #step2 - build tank list from", "using the TransactionID to create unique json file # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) #", "list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list # for k in list: # #print(type(k)) #", "# NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE", "INVCALCALARM # #NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF YOU", "list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list # for k in list: # try: #", "# g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) # #get the new inv alrm count from the newtransactidstr", "# 
g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 - for each tank in tanklist get latest inventory", "# except KeyError: # pass # #Tank example reading the list in Tank", "thecount <= 0: # #No new inv, Use latest unique - BASICALLY THIS", "PRESENT AT END! # print('more than 100, need to iterate to latest') #", "# time.sleep(1) # #step3 - get latest inv and save file # print('writing", "bits') # #TODO: Add function in Process to perform an alarm bits lookup", "g.save_resp_unique_json(newuniquedictresponse, '_latest') # else: # print('Less than 100') # #save as latest inv", "Now parse the unique json file to get the new transaction id and", "#g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count: ' + str(newinvalrmcount)) # #Step3", "and save unique json reponse # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) #", "# newinvalrmcount = invalrmcount # while newinvalrmcount == 100: # time.sleep(3) # #replaces", "= p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var # print('NEW TransactionID: ' + nexttransactidstr + ' NEW", "newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID: ' + newtransactidstr", "#Step 5- Repeat as neccessary until count < 100 to get the latest", "0 and newinvalrmcount < 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally,", "print(str(datetime.datetime.now()) + ' - wake up...') # #step1 - request all tanks and", "get the TransactionID and Inv Calc Alarm count # p = gateway.Process() #", "# Make the Request to Gateway # 
soapResponse = g.gateway_request(soapreqs.get_org_soap()) # soapResponse =", "# ''' EARLY TEST SCENARIOS ''' # --------------------------------------------------------- # # invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory']", "# uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4 - Now parse the", "transactid and count to first one above # #nextinvalrmcount = invalrmcount # nextinvalrmcount", "THE GATEWAY DEMO APP\\n--------------------------------') # g = gateway.Gateway() # p = gateway.Process() #", "txt value'}}} # pprint.pprint(d) # print(d['ONE']) # print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) #", "invalrmcount # nextinvalrmcount = thecount # nexttransactidstr = transactidstr # # while more", "< 100: # break # print('fetching next...') # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW", "file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay # print('zzzzz') # time.sleep(180) #sleep for 3mins, increase", "working test! 
# tanklist = p.get_tank_list() #gives list of tank ids # print(tanklist)", "on count # g = gateway.Gateway() # p = gateway.Process() # #step1 -", "logtxt = 'error' # return logtxt # # TEST 9 - modified test", "unique json reponse # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get the", "# uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # transactidstr = p.get_inventorycalcalrm_transactID() #", "ALREADY EXIST # print('Zero new inventory records, use the existing latest') # elif", "nexttransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount)) # nexttransactidstr = newtransactidstr", "'' # for item in invalrmlist: # if item['sUTCInventoryTime']: # #datetime_object = datetime.strptime(str(item['sUTCInventoryTime']),", "print('fetching next...') # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID: ' + newtransactidstr) #", "# SETUP RUN TEST TO CHECK FOR CHANGES VIA GATEWAY # # TODO:", "if item['sUTCInventoryTime']: # #datetime_object = datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y %I:%M:%S %p') # print(str(item['sUTCInventoryTime']))", "a second gateway req using the TransactionID to create unique json - first", "# #step1 - req all tanks and write to master tanks file #", "# #print(type(k)) # #print(k) # for k, v in k.items(): # if k", "DEMO APP\\n--------------------------------') # g = gateway.Gateway() # p = gateway.Process() # while True:", "for item in tanklist: # print(item) #test2 # invlist = p.get_inventory_list() # for", "data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed alarm data to file...') #", "INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY LATEST 
IF PRESENT AT END! # print('more than", "print('NEW TransactionID: ' + newtransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount))", "in bothlist: # print(item) #test4 # print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr = p.get_latestinvid_bytank('10203647') #works!", "str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List: ') # list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list # for", "' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List: ') # list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list", "item in tanklist: # print(item) #test2 # invlist = p.get_inventory_list() # for item", "= p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) # #now, check if latest unique json has no", "g.save_resp_unique_json(firstresponse, transactidstr) # transactidstr = p.get_inventorycalcalrm_transactID() # # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID:", "count - if less than 100, nothing more to do # nexttolastidstr =", "iterate to latest') # #set transactid and count to first one above #", "next to last id string in case last item has zero records #", "k['iLocationID']: # print('ID: ' + str(k['iLocationID']) + ' Name: ' + str(k['sLocationName']) #", "response and save json file # g = gateway.Gateway() # dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))", "print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list # for k in list: #", "UNIQUE JSON FILE FOR INV RECORDS # #ELSE IF COUNT >= 100 -->", "similar to step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 - parse and display the", "soapResponse = g.gateway_request(soapreqs.get_tank_soap()) # soapResponse = 
g.gateway_request(soapreqs.get_inv_soap()) # soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr", "count from the newtransactidstr # newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW Inv Count:", "TEST SCENARIOS ''' # --------------------------------------------------------- # # invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime =", "p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: ' + str(invalrmcount)) # #set transactid and", "- works now, similar to step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 - parse", "Add function in Process to perform an alarm bits lookup to decode the", "the existing latest') # elif thecount >= 100: # #ITERATE TO GET THE", "the TransactionID to create unique json file # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse,", "id string in case last item has zero records # nexttolastidstr = nexttransactidstr", "str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals') # #step5 - works now, similar to step 4", "# p = gateway.Process() # thecount = p.count_inventorycalcalrm() # transactidstr = p.get_inventorycalcalrm_transactID() #", "= p.count_inventorycalcalrm() # transactidstr = p.get_inventorycalcalrm_transactID() # print('TransactID: ' + transactidstr) # print('Inventory", "+ str(k['iLocationID']) + ' Name: ' + str(k['sLocationName']) # + ' Address: '", "p.get_grossvol_byinvid(latestinvidstr) + ' gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) # # TEST 8 - full test", "in d['soap:Body']: # print(k) # break # d = {'ONE':{'TWO':{'THREE':'some txt value'}}} #", "json file to be the generic starting point GetInventoryCalcAlarmResponselatest json file! # if", "TANKS! 
# #TODO: Place thi ALL into a function that whose job is", "file to be the generic starting point GetInventoryCalcAlarmResponselatest json file! # if len(str(newuniquedictresponse))", "item['iCalcAlarmBits']) # f = open('temp.json', 'w') # f.write(json.dumps(resp, sort_keys=True, indent=4)) # for k", "CHECK FOR CHANGES VIA GATEWAY # # TODO: Switch print stmts to log", "# #replaces step4 # newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr)", "tanklist: #for each unique tank, create a unique file for each tank #", "# #step5 - works now, similar to step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7", "for k in list: # #print(type(k)) # #print(k) # for k, v in", "HAVE LESS THAN 100 TANKS!''' # try: # logtxt = '' # g", "EMPTY LATEST IF PRESENT AT END! # print('more than 100, need to iterate", "nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) # #now, check if", "count to first one above # #nextinvalrmcount = invalrmcount # nextinvalrmcount = thecount", "inv and save file # print('writing parsed inventory data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap())))", "step 1 # tanklist = p.get_tank_list() #gives list of tank ids - THIS", "# #Tank example reading the list in Tank value # print('Return code: '", "test #8 for using latest inv above based on count # g =", "first item # p = gateway.Process() # thecount = p.count_inventorycalcalrm() # transactidstr =", "- req all tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) #", "THAN 100 TANKS! 
# #TODO: Place thi ALL into a function that whose", "# while True: # print(str(datetime.datetime.now()) + ' - wake up...') # #step1 -", "same # # Step2 - Process the json file to get the TransactionID", "# #Step2.5 - make a second gateway req using the TransactionID to create", "# INV ALARM CALC TRANSACTIONID TESTS # # Step1 - make request using", "# import json # --------------------------------------------------------- # ''' EARLY TEST SCENARIOS ''' # ---------------------------------------------------------", "in tanklist get latest inventory and display # #note: for this to work,", "#returns list # for k in list: # try: # if k['iLocationID']: #", "req using the TransactionID to create unique json - first test # testinvtransactid", "EACH UNIQUE JSON! ONCE YOU CALL THE WEB SERVICE WITH TRANSACTID, YOU CANNOT", "gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # # Everything depends on count", "' + str(newinvalrmcount)) # #update nexttransactid and nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #", "COUNT >= 100 --> NEED TO ITERATE THRU TO GET LATEST # #MUST", "try: # if k['iTankID']: # print('ID: ' + str(k['iTankID'])) # except KeyError: #", "YOU CANNOT GET IT AGAIN! 
# #ELSE YOU HAVE THE LATEST INV IN", "set new transactid to that from latest unique json # while True: #", "ITERATE THRU TO GET LATEST # #MUST MAKE SURE YOU SAVE EACH UNIQUE", "# print('Zero new inventory records, use the existing latest') # elif thecount >=", "in tanklist: #display latest inventory for each tank in list # latestinvidstr =", "g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) # #step2 - build tank list from file created in", "# print('ID: ' + str(k['iTankID'])) # except KeyError: # pass # --------------------------------------------------------- #", "JSON FILE FOR INV RECORDS # #ELSE IF COUNT >= 100 --> NEED", "# g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created unique json for TransactionID ' + nexttransactidstr) #", "secs # #Step2.5 - make a second gateway req using the TransactionID to", "#returns list # for k in list: # try: # if k['iTankID']: #", "and save unique json reponse for the next transactid - IMPORTANT: THIS WILL", "str(k['sLocationName']) # + ' Address: ' + str(k['sAddress1'])) # except KeyError: # pass", "gateway.Gateway() # Make the Request to Gateway # soapResponse = g.gateway_request(soapreqs.get_org_soap()) # soapResponse", "latest unique json # while True: # #save next to last id string", "k == 'iOrganizationID': # print(k, v) # #print(v) # #Loc example reading the", "CANNOT GET IT AGAIN! 
# #ELSE YOU HAVE THE LATEST INV IN GetInventoryCalcAlarmResponse.json,", "#display latest inventory for each tank in list # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get", "deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save the latest non-empty unique inv", "4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 - parse and display the data # for", "+ transactidstr) # print('Inventory count: ' + str(thecount)) # #IF COUNT <= 0", "# g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 - parse and display the data # for item", "gateway.Process() # while True: # print(str(datetime.datetime.now()) + ' - wake up...') # #step1", "TRANSACTIONID TESTS # # Step1 - make request using simple inventory soap (ie.", "save unique json reponse for the next transactid - IMPORTANT: THIS WILL GIVE", "testing only # import pprint # import json # --------------------------------------------------------- # ''' EARLY", "count of this first item # p = gateway.Process() # thecount = p.count_inventorycalcalrm()", "elif thecount >= 100: # #ITERATE TO GET THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO", "nexttransactidstr = transactidstr # # while more to get, set new transactid to", "0: # #No new inv, Use latest unique - BASICALLY THIS MEANS NEED", "into a function that whose job is to basically create the latest inventory", "= thecount # nexttransactidstr = transactidstr # # while more to get, set", "# soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse response # dresp = g.parse_response(soapResponse) #", "# inventorytime = '' # for item in invalrmlist: # if item['sUTCInventoryTime']: #", "TransactionID: ' + newtransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount)) #", "THRU TO GET LATEST # #MUST MAKE SURE YOU SAVE EACH UNIQUE 
JSON!", "for each tank in list # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory", "json - first test # testinvtransactid = '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount =", "Count: ' + str(newinvalrmcount)) # #Step 5- Repeat as neccessary until count <", "- THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # #NOTE: THIS METHOD OF", "display the data # for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get", "alrm count from the newtransactidstr # newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW Inv", "except KeyError: # pass # #Tank example reading the list in Tank value", "bits lookup to decode the actual alarm state # #RUN.PY TEST # #", "inventory id for the tank # print('TankID: ' + str(item) + ' currently", "transactidstr = p.get_inventorycalcalrm_transactID() # print('TransactID: ' + transactidstr) # print('Inventory count: ' +", "#replaces step4 # newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp", "= gateway.Process() # while True: # print(str(datetime.datetime.now()) + ' - wake up...') #", "# print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item", "# # TEST 9 - modified test #8 for using latest inv above", "in Tank value # print('Return code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List: ')", "#NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS", "json from gateway request # newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, 
newtransactidstr) # #get", "GetInventoryCalcAlarmResponse.json, SAVE TO LATEST # if thecount <= 0: # #No new inv,", "from the uniquedictresponse # invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: '", "nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) # #now,", "= gateway.Gateway() # p = gateway.Process() # while True: # print(str(datetime.datetime.now()) + '", "ALL into a function that whose job is to basically create the latest", "if k['iLocationID']: # print('ID: ' + str(k['iLocationID']) + ' Name: ' + str(k['sLocationName'])", "count less than 100 # if nextinvalrmcount < 100: # break # print('fetching", "list of tank ids # print('TankIDs: ' + str(tanklist)) # for item in", "unique json file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3", "created in step 1 # tanklist = p.get_tank_list() #gives list of tank ids", "= gateway.Gateway() # dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the same # # Step2", "latest inv and save file # print('writing parsed inventory data to file...') #", "#gives list of tank ids # print(tanklist) # for item in tanklist: #display", "= p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID: ' + newtransactidstr + ' NEW Inv Count:", "g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 - for each tank in tanklist get latest inventory and", "#updates nexttransactidstr # # NEW TEST TO GET LATEST INV RECORDS - THIS", "- wake up...') # #step1 - request all tanks and write to master", "+ str(newinvalrmcount)) # #Step 5- Repeat as neccessary until count < 100 to", "#note: for this to work, you must have already done steps 1 and", 
"file. # g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # #", "= g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr = '10203647' # soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse", "str(newinvalrmcount)) # #update nexttransactid and nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount =", "# # Everything depends on count of this first item # p =", "newtransactidstr #updates nexttransactidstr # # NEW TEST TO GET LATEST INV RECORDS -", "has gross vol ' + p.get_grossvol_byinvid(latestinvidstr) + ' gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) # #", "# print(item) #test3 # bothlist = p.get_tankinv_list() # for item in bothlist: #", "nice working test! # tanklist = p.get_tank_list() #gives list of tank ids #", "VIA GATEWAY # # TODO: Switch print stmts to log statements # print('\\nWELCOME", "- full test working thru step 4 - fully working # g =", "FOR SEVERAL ITEMS BELOW!!!!!!! # #print(tanklist) # for item in tanklist: #for each", "= g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get the new inv alarm count from", "while True: # #save next to last id string in case last item", "be the generic starting point GetInventoryCalcAlarmResponselatest json file! # if len(str(newuniquedictresponse)) > 0:", "# tanklist = p.get_tank_list() #gives list of tank ids # print('TankIDs: ' +", "'''NEW TEST TO GET LATEST INV RECORDS - THIS PROCESS GIVES YOU LATEST", "# g.save_resp_unique_json(firstresponse, '_latest') # # #also get and save unique json reponse for", "Inv Count: ' + str(newinvalrmcount)) # nexttransactidstr = newtransactidstr #updates nexttransactidstr # #", "is to basically create the latest inventory json file. 
# g = gateway.Gateway()", "Location value # print('Return code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List: ') #", "latest') # #set transactid and count to first one above # nexttransactidstr =", "IS AN IMPORTANT STEP FOR SEVERAL ITEMS BELOW!!!!!!! # #print(tanklist) # for item", "if less than 100, nothing more to do # nexttolastidstr = '' #", "Inv Count: ' + str(newinvalrmcount)) # #update nexttransactid and nextinvalrmcount # nexttransactidstr =", "latest') # #transactidstr = p.get_inventorycalcalrm_transactID() # #invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' +", "# --------------------------------------------------------- # # GATEWAY SOAP GEN AND REQUEST TESTS # g =", "id for the tank # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has", "pass # --------------------------------------------------------- # ''' REAL GATEWAY TEST SECTION ''' # --------------------------------------------------------- #", "create the latest inventory json file. 
# g = gateway.Gateway() # firstresponse =", "g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get the new inv alarm count from the", "# transactidstr = p.get_inventorycalcalrm_transactID() # # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' +", "TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json # #FILE TO THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD", "on count of this first item # p = gateway.Process() # thecount =", "count from the uniquedictresponse # invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count:", "# #MUST USE LATEST UNIQUE JSON FILE FOR INV RECORDS # #ELSE IF", "# #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count: ' + str(newinvalrmcount)) #", "100: # #ITERATE TO GET THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY LATEST", "that whose job is to basically create the latest inventory json file. 
#", "#for each unique tank, create a unique file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))),", "print(' NEW Inv Count: ' + str(invalrmcount)) # #determine inv count - if", "# uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created unique json for TransactionID", "the latest non-empty unique inv json file to the latest # g.save_resp_unique_json(newuniquedictresponse, '_latest')", "get, set new transactid to that from latest unique json # while True:", "file # g.save_resp_unique_json(firstresponse, '_latest') # # #also get and save unique json reponse", "g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr = '10203647' # soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse response", "#gives list of tank ids # print('TankIDs: ' + str(tanklist)) # for item", "g.save_resp_unique_json(firstresponse, transactidstr) # except: # logtxt = 'error' # return logtxt # #", "using latest inv above based on count # g = gateway.Gateway() # p", "to first one above # nexttransactidstr = transactidstr # nextinvalrmcount = invalrmcount #", "json file to get the new transaction id and count # newtransactidstr =", "#save as latest inv json file # g.save_resp_unique_json(firstresponse, '_latest') # # #also get", "# g = gateway.Gateway() # Make the Request to Gateway # soapResponse =", "= p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW Inv Count: ' + str(newinvalrmcount)) # #update nexttransactid", "NEXT REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # transactidstr", "in tanklist: # print(item) #test2 # invlist = p.get_inventory_list() # for item in", "# nexttolastidstr = 
nexttransactidstr # #break while loop if count less than 100", "tank list from file created in step 1 # tanklist = p.get_tank_list() #gives", "# print('\\nWELCOME TO THE GATEWAY DEMO APP\\n--------------------------------') # g = gateway.Gateway() # p", "g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # except: # logtxt = 'error' # return", "''' REAL GATEWAY TEST SECTION ''' # --------------------------------------------------------- # # GATEWAY SOAP GEN", "in Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list", "and inv # for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the", "--------------------------------------------------------- # ''' EARLY TEST SCENARIOS ''' # --------------------------------------------------------- # # invalrmlist =", "LESS THAN 100 TANKS! 
# #TODO: Place thi ALL into a function that", "# #step2 - build tank list from file created in step 1 #", "p.get_tankname_bytankid_file(str(item)) + ' currently has gross vol of ' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) +", "9 - modified test #8 for using latest inv above based on count", "all tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) #", "to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) # #step2 - build tank", "calc alarm bits') # #TODO: Add function in Process to perform an alarm", "= invalrmcount # # while more to get, set new transactid to that", "' + newtransactidstr) # #get the next unique json from gateway request #", "p.get_tankinv_list() # for item in bothlist: # print(item) #test4 # print(p.get_grossvol_byinvid('194699940')) #test5 #", "# print(k, v) # #print(v) # #Loc example reading the list in Location", "YOU LATEST UNIQUE INVCALCALARM # NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY", "# print('Location List: ') # list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list # for k", "print('ID: ' + str(k['iTankID'])) # except KeyError: # pass # --------------------------------------------------------- # '''", "= p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) # #now, check if latest", "Inv Count: ' + str(newinvalrmcount)) # #Step 5- Repeat as neccessary until count", "MAKE SURE YOU SAVE EACH UNIQUE JSON! 
ONCE YOU CALL THE WEB SERVICE", "list from file created in step 1 # tanklist = p.get_tank_list() #gives list", "print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list # for k in", "nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) # #now, check if latest unique json has", "= d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list # for k in list: # #print(type(k)) # #print(k)", "as latest inv json file # g.save_resp_unique_json(firstresponse, '_latest') # # #also get and", "to fix # #Org example reading the list in Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns'])", "example reading the list in Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list", "LATEST INV RECORDS - THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # NOTE:", "# --------------------------------------------------------- # # invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime = '' # for", "# g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get the new inv alarm count from the uniquedictresponse", "neccessary until count < 100 to get the latest inventory # nexttransactidstr =", "and newinvalrmcount < 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename", "INV RECORDS - THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # NOTE: THIS", "#MUST USE LATEST UNIQUE JSON FILE FOR INV RECORDS # #ELSE IF COUNT", "# deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # 
g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save the latest non-empty unique", "soapResponse = g.gateway_request(soapreqs.get_loc_soap()) # soapResponse = g.gateway_request(soapreqs.get_tank_soap()) # soapResponse = g.gateway_request(soapreqs.get_inv_soap()) # soapResponse", "# PROCESSING TEST SECTION ONLY # p = gateway.Process() #test1 # tanklist =", "# print(item) #test4 # print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr = p.get_latestinvid_bytank('10203647') #works! # print(latestinvstr)", "p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var # print('NEW TransactionID: '", "f = open('temp.json', 'w') # f.write(json.dumps(resp, sort_keys=True, indent=4)) # for k in d['soap:Body']:", "the unique inv json file to be the generic starting point GetInventoryCalcAlarmResponselatest json", "THIS WILL GIVE AN EMPTY NEXT REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) #", "print('Zero new inventory records, use the existing latest') # elif thecount >= 100:", "# #step1 - request all tanks and write to master tanks file #", "tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) # #step2", "to work, you must have already done steps 1 and 3 above -", "# bothlist = p.get_tankinv_list() # for item in bothlist: # print(item) #test4 #", "# print(item) #need to fix # #Org example reading the list in Organization", "has no records, if so delete it # if len(nexttolastidstr) > 0 and", "pass # #Tank example reading the list in Tank value # print('Return code:", "# print('more than 100, need to iterate to latest') # #transactidstr = p.get_inventorycalcalrm_transactID()", "function in Process to perform an alarm bits lookup to decode the actual", "+ transactidstr + ' 
Inv Count: ' + str(invalrmcount)) # time.sleep(2) #wait 2", "tank ids # print('TankIDs: ' + str(tanklist)) # for item in tanklist: #for", "newinvalrmcount < 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save the", "p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) # #now, check if latest unique", "newinvalrmcount = invalrmcount # while newinvalrmcount == 100: # time.sleep(3) # #replaces step3", "unique inv json file to be the generic starting point GetInventoryCalcAlarmResponselatest json file!", "<= 0: # #No new inv, Use latest unique - BASICALLY THIS MEANS", "' + p.get_grossvol_byinvid(latestinvidstr) + ' gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) # # TEST 8 -", "datetime #Imports currently used for testing only # import pprint # import json", "> 0: # g.save_resp_unique_json(newuniquedictresponse, 'latest') # else: # print('less than 100, have latest')", "= g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4 - Now parse the unique json", "in k.items(): # if k == 'iOrganizationID': # print(k, v) # #print(v) #", "= p.get_tank_list() #gives list of tank ids # print('TankIDs: ' + str(tanklist)) #", "nexttransactidstr = transactidstr # newinvalrmcount = invalrmcount # while newinvalrmcount == 100: #", "# try: # if k['iLocationID']: # print('ID: ' + str(k['iLocationID']) + ' Name:", "for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3 - get latest", "PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # NOTE: THIS METHOD OF GETTING LATEST", "# print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) # 
tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item in tanklist:", "= '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) # newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count: ' +", "less than 100, nothing more to do # nexttolastidstr = '' # newuniquedictresponse", "for k, v in k.items(): # if k == 'iOrganizationID': # print(k, v)", "# #step7 - parse and display the data # for item in tanklist:", "- modified test #8 for using latest inv above based on count #", "# alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus != '0': # print('Tank ' +", "newinvalrmcount = p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count: ' + str(newinvalrmcount)) # #Step3 - make", "+ newtransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount)) # #Step 5-", "# #print(v) # #Loc example reading the list in Location value # print('Return", "import pprint # import json # --------------------------------------------------------- # ''' EARLY TEST SCENARIOS '''", "TANKS!''' # try: # logtxt = '' # g = gateway.Gateway() # firstresponse", "TransactionID ' + nexttransactidstr) # #replaces step4 # newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount", "# soapResponse = g.gateway_request(soapreqs.get_loc_soap()) # soapResponse = g.gateway_request(soapreqs.get_tank_soap()) # soapResponse = g.gateway_request(soapreqs.get_inv_soap()) #", "transactidstr + ' Inv Count: ' + str(invalrmcount)) # time.sleep(2) #wait 2 secs", "+ ' Inv Count: ' + str(invalrmcount)) # time.sleep(2) #wait 2 secs #", "# nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) # #now, check if latest unique json", "a unique json file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) #", "has alarm status ' + 
item['iCalcAlarmBits']) # f = open('temp.json', 'w') # f.write(json.dumps(resp,", "# g = gateway.Gateway() # p = gateway.Process() # while True: # print(str(datetime.datetime.now())", "data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay # print('zzzzz') # time.sleep(180) #sleep for", "GetInventoryCalcAlarmResponse.json # #FILE TO THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD ALREADY EXIST #", "p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW Inv Count: ' + str(newinvalrmcount)) # #update nexttransactid and", "p = gateway.Process() # transactidstr = p.get_inventorycalcalrm_transactID() # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID:", "for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id", "until count < 100 to get the latest inventory # nexttransactidstr = transactidstr", "inventory soap (ie. zero as ACK code), parse response and save json file", "YOU LATEST UNIQUE INVCALCALARM # #NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY", "THIS IS AN IMPORTANT STEP FOR SEVERAL ITEMS BELOW!!!!!!! # #print(tanklist) # for", "GATEWAY TEST SECTION ''' # --------------------------------------------------------- # # GATEWAY SOAP GEN AND REQUEST", "= g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # transactidstr = p.get_inventorycalcalrm_transactID() # # invalrmcount", "#step3 - get latest inv and save file # print('writing parsed inventory data", "p.get_tank_list() #gives list of tank ids - THIS IS AN IMPORTANT STEP FOR", "SURE YOU SAVE EACH UNIQUE JSON! 
ONCE YOU CALL THE WEB SERVICE WITH", "print(str(item['sUTCInventoryTime'])) # # if item['iCalcAlarmBits'] != str(0): # # print('Tank ' + item['iTankID']", "SOAP GEN AND REQUEST TESTS # g = gateway.Gateway() # Make the Request", "and Inv Calc Alarm count # p = gateway.Process() # transactidstr = p.get_inventorycalcalrm_transactID()", "# g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # # Everything", "# #step3 - get latest inv and save file # print('writing parsed inventory", "0: # g.save_resp_unique_json(newuniquedictresponse, 'latest') # else: # print('less than 100, have latest') #", "fully working # g = gateway.Gateway() # p = gateway.Process() # #step1 -", "datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y %I:%M:%S %p') # print(str(item['sUTCInventoryTime'])) # # if item['iCalcAlarmBits'] !=", "# nextinvalrmcount = invalrmcount # # while more to get, set new transactid", "# #Step4 - Now parse the unique json file to get the new", "inv alarm count from the uniquedictresponse # invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW", "TEST TO CHECK FOR CHANGES VIA GATEWAY # # TODO: Switch print stmts", "gateway.Process() # thecount = p.count_inventorycalcalrm() # transactidstr = p.get_inventorycalcalrm_transactID() # print('TransactID: ' +", "RECORDS - THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # #NOTE: THIS METHOD", "p = gateway.Process() #test1 # tanklist = p.get_tank_list() # for item in tanklist:", "Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list #", "# + ' Address: ' + str(k['sAddress1'])) # except KeyError: # pass #", "# #determine inv count - if less than 100, nothing more to do", "must have already done steps 1 and 3 above - 
need tank and", "inv json file to the latest # g.save_resp_unique_json(newuniquedictresponse, '_latest') # else: # print('Less", "= '' # for item in invalrmlist: # if item['sUTCInventoryTime']: # #datetime_object =", "= gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # # Everything depends on", "ITEMS BELOW!!!!!!! # #print(tanklist) # for item in tanklist: #for each unique tank,", "tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1) # print('retrieved", "# g.save_resp_json(firstresponse) # # Everything depends on count of this first item #", "# print(d['ONE']) # print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #", "= 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename the unique inv json file to", "LATEST # #MUST MAKE SURE YOU SAVE EACH UNIQUE JSON! ONCE YOU CALL", "the next unique json from gateway request # newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse,", "newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var # print('NEW TransactionID: ' + nexttransactidstr + '", "delete it # if len(nexttolastidstr) > 0 and newinvalrmcount < 1: # deletresponsestr", "Inv Calc Alarm count # p = gateway.Process() # transactidstr = p.get_inventorycalcalrm_transactID() #", "# p = gateway.Process() # transactidstr = p.get_inventorycalcalrm_transactID() # invalrmcount = p.count_inventorycalcalrm() #", "gateway req using the TransactionID to create unique json - first test #", "to basically create the latest inventory json file. 
# g = gateway.Gateway() #", "g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename the unique inv json file to be the generic", "create unique json - first test # testinvtransactid = '47174434' # #g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(testinvtransactid))) #", "# invlist = p.get_inventory_list() # for item in invlist: # print(item) #test3 #", "the next transactid - IMPORTANT: THIS WILL GIVE AN EMPTY NEXT REPONSE #", "time.sleep(2) #wait 2 secs # #Step2.5 - make a second gateway req using", "# latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank #", "= g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the same # # Step2 - Process the json", "bothlist: # print(item) #test4 # print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr = p.get_latestinvid_bytank('10203647') #works! #", "latest unique json has no records, if so delete it # if len(nexttolastidstr)", "modified test #8 for using latest inv above based on count # g", "# NEW TEST TO GET LATEST INV RECORDS - THIS PROCESS GIVES YOU", "print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr = p.get_latestinvid_bytank('10203647') #works! # print(latestinvstr) #test6 - nice working", "print(item) #test4 # print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr = p.get_latestinvid_bytank('10203647') #works! 
# print(latestinvstr) #test6", "and 3 above - need tank and inv # for item in tanklist:", "print('Location List: ') # list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list # for k in", "next unique json from gateway request # newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, newtransactidstr)", "loop if count less than 100 # if nextinvalrmcount < 100: # break", "bothlist = p.get_tankinv_list() # for item in bothlist: # print(item) #test4 # print(p.get_grossvol_byinvid('194699940'))", "decode the actual alarm state # #RUN.PY TEST # # SETUP RUN TEST", "= p.get_tankinv_list() # for item in bothlist: # print(item) #test4 # print(p.get_grossvol_byinvid('194699940')) #test5", "nothing more to do # nexttolastidstr = '' # newuniquedictresponse = [] #", "simple inventory soap (ie. zero as ACK code), parse response and save json", "COUNT <= 0 --> NO NEW INV RECORDS # #MUST USE LATEST UNIQUE", "datetime import datetime #Imports currently used for testing only # import pprint #", "GATEWAY # # TODO: Switch print stmts to log statements # print('\\nWELCOME TO", "save the latest non-empty unique inv json file to the latest # g.save_resp_unique_json(newuniquedictresponse,", "TEST TO GET LATEST INV RECORDS - THIS PROCESS GIVES YOU LATEST UNIQUE", "Place thi ALL into a function that whose job is to basically create", "GATEWAY DEMO APP\\n--------------------------------') # g = gateway.Gateway() # p = gateway.Process() # while", "Step2 - Process the json file to get the TransactionID and Inv Calc", "each unique tank, create a unique json file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))),", "' + str(invalrmcount)) # #determine inv count - if less than 100, nothing", "list: # try: # if k['iLocationID']: # print('ID: ' + str(k['iLocationID']) + '", "gateway.Gateway() 
# p = gateway.Process() # #step1 - req all tanks and write", "# break # print('fetching next...') # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID: '", "have latest') # g.save_resp_unique_json(uniquedictresponse, 'latest') # PROCESSING TEST SECTION ONLY # p =", "parse response and save json file # g = gateway.Gateway() # dictresponse =", "k in list: # #print(type(k)) # #print(k) # for k, v in k.items():", "' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List: ') # list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list", "THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY LATEST IF PRESENT AT END! #", "= p.get_inventorycalcalrm_transactID() # print('TransactID: ' + transactidstr) # print('Inventory count: ' + str(thecount))", "# if item['sUTCInventoryTime']: # #datetime_object = datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y %I:%M:%S %p') #", "= p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank # alarmstatus =", "# print(dresp) # INV ALARM CALC TRANSACTIONID TESTS # # Step1 - make", "INV ALARM CALC TRANSACTIONID TESTS # # Step1 - make request using simple", "Tank value # print('Return code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List: ') #", "#print(tanklist) # for item in tanklist: #for each unique tank, create a unique", "# print('TankIDs: ' + str(tanklist)) # for item in tanklist: #for each unique", "in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the", "working # g = gateway.Gateway() # p = gateway.Process() # #step1 - req", "# print(str(datetime.datetime.now()) + ' - wake up...') # #step1 - request all tanks", "= transactidstr # # while more to get, set new transactid to that", "#Step3 - make a second gateway request using the TransactionID to create unique", "non-empty unique inv json file to the 
latest # g.save_resp_unique_json(newuniquedictresponse, '_latest') # else:", "len(nexttolastidstr) > 0 and newinvalrmcount < 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr))", "AGAIN! # #ELSE YOU HAVE THE LATEST INV IN GetInventoryCalcAlarmResponse.json, SAVE TO LATEST", "job is to basically create the latest inventory json file. # g =", "create a unique file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) #", "# print('Return code: ' + str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List: ') # list =", "item['iCalcAlarmBits'] != str(0): # # print('Tank ' + item['iTankID'] + ' has alarm", "THE LATEST INV IN GetInventoryCalcAlarmResponse.json, SAVE TO LATEST # if thecount <= 0:", "print(item) #test2 # invlist = p.get_inventory_list() # for item in invlist: # print(item)", "inventory id for the tank # alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus !=", "in step 1 # tanklist = p.get_tank_list() #gives list of tank ids -", "# print('new count: ' + str(newinvalrmcount)) # #Step3 - make a second gateway", "open('temp.json', 'w') # f.write(json.dumps(resp, sort_keys=True, indent=4)) # for k in d['soap:Body']: # print(k)", "inventory data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed alarm data to file...')", "#get the latest inventory id for the tank # print('Tank ' + p.get_tankname_bytankid_file(str(item))", "Count: ' + str(invalrmcount)) # time.sleep(2) #wait 2 secs # #Step2.5 - make", "100, need to iterate to latest') # #set transactid and count to first", "= p.get_inventorycalcalrm_transactID() # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr + '", "else: # print('less than 100, have latest') # g.save_resp_unique_json(uniquedictresponse, 
'latest') # PROCESSING TEST", "# p = gateway.Process() # while True: # print(str(datetime.datetime.now()) + ' - wake", "statements # print('\\nWELCOME TO THE GATEWAY DEMO APP\\n--------------------------------') # g = gateway.Gateway() #", "vol ' + p.get_grossvol_byinvid(latestinvidstr) + ' gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) # # TEST 8", "g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed alarm data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay #", "g = gateway.Gateway() # dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the same # #", "ONLY # p = gateway.Process() #test1 # tanklist = p.get_tank_list() # for item", "reading the list in Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) # list =", "8 - full test working thru step 4 - fully working # g", "g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the same # # Step2 - Process the json file", "count to first one above # nexttransactidstr = transactidstr # nextinvalrmcount = invalrmcount", "# #get the new inv alarm count from the uniquedictresponse # invalrmcount =", "REAL GATEWAY TEST SECTION ''' # --------------------------------------------------------- # # GATEWAY SOAP GEN AND", "than 100 # if nextinvalrmcount < 100: # break # print('fetching next...') #", "# list = d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list # for k in list: # #print(type(k))", "ids - THIS IS AN IMPORTANT STEP FOR SEVERAL ITEMS BELOW!!!!!!! 
# #print(tanklist)", "g = gateway.Gateway() # p = gateway.Process() # while True: # print(str(datetime.datetime.now()) +", "unique tank, create a unique file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) #", "item['sUTCInventoryTime']: # #datetime_object = datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y %I:%M:%S %p') # print(str(item['sUTCInventoryTime'])) #", "# print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has alarm status of '", "newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW Inv Count: ' + str(newinvalrmcount)) # #update", "print(d['ONE']) # print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for", "for this to work, you must have already done steps 1 and 3", "for the next transactid - IMPORTANT: THIS WILL GIVE AN EMPTY NEXT REPONSE", "#step4 - for each tank in tanklist get latest inventory and display #", "YOU CALL THE WEB SERVICE WITH TRANSACTID, YOU CANNOT GET IT AGAIN! #", "WITH TRANSACTID, YOU CANNOT GET IT AGAIN! 
# #ELSE YOU HAVE THE LATEST", "thecount >= 100: # #ITERATE TO GET THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL", "unique json # while True: # #save next to last id string in", "if k['iTankID']: # print('ID: ' + str(k['iTankID'])) # except KeyError: # pass #", "newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID: ' + newtransactidstr) # #get the next", "# # GATEWAY SOAP GEN AND REQUEST TESTS # g = gateway.Gateway() #", "log statements # print('\\nWELCOME TO THE GATEWAY DEMO APP\\n--------------------------------') # g = gateway.Gateway()", "new transaction id and count # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr)", "# #ITERATE TO GET THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY LATEST IF", "if len(nexttolastidstr) > 0 and newinvalrmcount < 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' #", "invlist = p.get_inventory_list() # for item in invlist: # print(item) #test3 # bothlist", "TO GET LATEST # #MUST MAKE SURE YOU SAVE EACH UNIQUE JSON! ONCE", "%p') # print(str(item['sUTCInventoryTime'])) # # if item['iCalcAlarmBits'] != str(0): # # print('Tank '", "# nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) # #now, check if latest unique json", "the latest inventory id for the tank # alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) # if", "latest inventory # nexttransactidstr = transactidstr # newinvalrmcount = invalrmcount # while newinvalrmcount", "WORKS IF YOU HAVE LESS THAN 100 TANKS!''' # try: # logtxt =", "#test1 # tanklist = p.get_tank_list() # for item in tanklist: # print(item) #test2", "# g = gateway.Gateway() # p = gateway.Process() # #step1 - req all", "# # print('Tank ' + item['iTankID'] + ' has alarm status ' +", "ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS! 
# #TODO: Place thi", "break # print('fetching next...') # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID: ' +", "+ alarmstatus + ' calc alarm bits') # #TODO: Add function in Process", "THAT SOULD ALREADY EXIST # print('Zero new inventory records, use the existing latest')", "LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS!''' # try:", "# #update nexttransactid and nextinvalrmcount # nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr)", "soapResponse = g.gateway_request(soapreqs.get_inv_soap()) # soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr = '10203647' # soapResponse", "#No new inv, Use latest unique - BASICALLY THIS MEANS NEED TO COMPARE", "perform an alarm bits lookup to decode the actual alarm state # #RUN.PY", "# soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr = '10203647' # soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) #", "= p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank # print('TankID: '", "#determine inv count - if less than 100, nothing more to do #", "# + alarmstatus + ' calc alarm bits') # #TODO: Add function in", "import datetime #Imports currently used for testing only # import pprint # import", "# uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get the new inv alarm", "import time from datetime import datetime #Imports currently used for testing only #", "# tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item in tanklist: # print(item) #need to", "newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID: ' + newtransactidstr + ' NEW Inv", "str(0): # # 
print('Tank ' + item['iTankID'] + ' has alarm status '", "json file to get the TransactionID and Inv Calc Alarm count # p", "that from latest unique json # while True: # #save next to last", "print('retrieved tanks...') # #step2 - build tank list from file created in step", "write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) # #step2 - build", "the same # # Step2 - Process the json file to get the", "unique json file to get the new transaction id and count # newtransactidstr", "# for item in tanklist: # print(item) #test2 # invlist = p.get_inventory_list() #", "g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # # Everything depends on count of this first item", "of this first item # p = gateway.Process() # thecount = p.count_inventorycalcalrm() #", "# # Step1 - make request using simple inventory soap (ie. zero as", "g.gateway_request(soapreqs.get_loc_soap()) # soapResponse = g.gateway_request(soapreqs.get_tank_soap()) # soapResponse = g.gateway_request(soapreqs.get_inv_soap()) # soapResponse = g.gateway_request(soapreqs.get_invalrm_soap())", "+ str(item) + ' currently has gross vol ' + p.get_grossvol_byinvid(latestinvidstr) + '", "nexttolastidstr = '' # newuniquedictresponse = [] # if invalrmcount == 100: #", "EARLY TEST SCENARIOS ''' # --------------------------------------------------------- # # invalrmlist = d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime", "# g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1) # print('retrieved tanks...') # #step2 - build tank list", "invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr + ' Inv Count: '", "one above # nexttransactidstr = transactidstr # nextinvalrmcount = invalrmcount # # while", "END! 
# print('more than 100, need to iterate to latest') # #transactidstr =", "SERVICE WITH TRANSACTID, YOU CANNOT GET IT AGAIN! # #ELSE YOU HAVE THE", "vol of ' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals') # #step5 - works", "LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS! # #TODO:", "master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1) # print('retrieved tanks...') # #step2 -", "- THIS IS AN IMPORTANT STEP FOR SEVERAL ITEMS BELOW!!!!!!! # #print(tanklist) #", "#8 for using latest inv above based on count # g = gateway.Gateway()", "LATEST # if thecount <= 0: # #No new inv, Use latest unique", "# for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory", "unique file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3 -", "100, need to iterate to latest') # #transactidstr = p.get_inventorycalcalrm_transactID() # #invalrmcount =", "' NEW Inv Count: ' + str(newinvalrmcount)) # nexttransactidstr = newtransactidstr #updates nexttransactidstr", "# g.save_resp_unique_json(newuniquedictresponse, '_latest') # else: # print('Less than 100') # #save as latest", "#TODO: Add function in Process to perform an alarm bits lookup to decode", "= p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID: ' + newtransactidstr +", "# Everything depends on count of this first item # p = gateway.Process()", "100, nothing more to do # nexttolastidstr = '' # newuniquedictresponse = []", "inv json file to be the generic starting point GetInventoryCalcAlarmResponselatest json file! 
#", "# print('TransactionID: ' + transactidstr) # #get and save unique json reponse #", "#test2 # invlist = p.get_inventory_list() # for item in invlist: # print(item) #test3", "new transactid to that from latest unique json # while True: # #save", "# firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) # # Everything depends on count of", "transactid and count to first one above # nexttransactidstr = transactidstr # nextinvalrmcount", "p.get_tank_list() #gives list of tank ids # print(tanklist) # for item in tanklist:", "str(newinvalrmcount)) # #Step 5- Repeat as neccessary until count < 100 to get", "from latest unique json # while True: # #save next to last id", "# time.sleep(3) # #replaces step3 # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) #", "p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID: ' + newtransactidstr) # #get the next unique json", "' + str(newinvalrmcount)) # #Step3 - make a second gateway request using the", "gross vol of ' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals') # #step5 -", "' has alarm status ' + item['iCalcAlarmBits']) # f = open('temp.json', 'w') #", "sort_keys=True, indent=4)) # for k in d['soap:Body']: # print(k) # break # d", "thi ALL into a function that whose job is to basically create the", "alarm state # #RUN.PY TEST # # SETUP RUN TEST TO CHECK FOR", "second gateway req using the TransactionID to create unique json - first test", "KeyError: # pass # --------------------------------------------------------- # ''' REAL GATEWAY TEST SECTION ''' #", "SECTION ''' # --------------------------------------------------------- # # GATEWAY SOAP GEN AND REQUEST TESTS #", "invalrmlist: # if item['sUTCInventoryTime']: # #datetime_object = 
datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y %I:%M:%S %p')", "k in list: # try: # if k['iTankID']: # print('ID: ' + str(k['iTankID']))", "GET LATEST # #MUST MAKE SURE YOU SAVE EACH UNIQUE JSON! ONCE YOU", "transactidstr = p.get_inventorycalcalrm_transactID() # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr +", "' currently has gross vol of ' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals')", "later # def build_latest_inv_file(): # '''NEW TEST TO GET LATEST INV RECORDS -", "file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) # #step2 - build tank list from file", "a unique file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3", "= p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank # print('Tank '", "# logtxt = '' # g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #", "unique json reponse for the next transactid - IMPORTANT: THIS WILL GIVE AN", "' # + alarmstatus + ' calc alarm bits') # #TODO: Add function", "= gateway.Process() # transactidstr = p.get_inventorycalcalrm_transactID() # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: '", "> 0 and newinvalrmcount < 1: # deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) #", "step 1 # tanklist = p.get_tank_list() #gives list of tank ids # print('TankIDs:", "#transactidstr = p.get_inventorycalcalrm_transactID() # #invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) #", "file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3 - get", "print stmts to log statements # print('\\nWELCOME TO THE 
GATEWAY DEMO APP\\n--------------------------------') #", "INVENTORY ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS!''' # try: #", "use the existing latest') # elif thecount >= 100: # #ITERATE TO GET", "TODO: Switch print stmts to log statements # print('\\nWELCOME TO THE GATEWAY DEMO", "alarm status of ' # + alarmstatus + ' calc alarm bits') #", "= '10203647' # soapResponse = g.gateway_request(soapreqs.get_tankgenlatlon_soap(tankgenlatlonstr)) # # Parse response # dresp =", "- Now parse the unique json file to get the new transaction id", "KeyError: # pass # #Tank example reading the list in Tank value #", "+ str(newinvalrmcount)) # #Step3 - make a second gateway request using the TransactionID", "logtxt = '' # g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse)", "PROCESSING TEST SECTION ONLY # p = gateway.Process() #test1 # tanklist = p.get_tank_list()", "count # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID: '", "SAVE EACH UNIQUE JSON! 
ONCE YOU CALL THE WEB SERVICE WITH TRANSACTID, YOU", "# print('fetching next...') # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # print('NEW TransactionID: ' + newtransactidstr)", "# Parse response # dresp = g.parse_response(soapResponse) # print(dresp) # INV ALARM CALC", "while True: # print(str(datetime.datetime.now()) + ' - wake up...') # #step1 - request", "len(str(newuniquedictresponse)) > 0: # g.save_resp_unique_json(newuniquedictresponse, 'latest') # else: # print('less than 100, have", "p.count_inventorycalcalrm_unique(transactidstr) # print('NEW TransactionID: ' + newtransactidstr + ' NEW Inv Count: '", "# print('NEW TransactionID: ' + nexttransactidstr + ' NEW Inv Count: ' +", "NEW Inv Count: ' + str(invalrmcount)) # #set transactid and count to first", "# f.write(json.dumps(resp, sort_keys=True, indent=4)) # for k in d['soap:Body']: # print(k) # break", "100: # print('more than 100, need to iterate to latest') # #set transactid", "file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1) # print('retrieved tanks...') # #step2 - build tank", "more to get, set new transactid to that from latest unique json #", "# # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # except: # logtxt", "# # g.save_resp_unique_json(firstresponse, transactidstr) # except: # logtxt = 'error' # return logtxt", "THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # NOTE: THIS METHOD OF GETTING", "#get the new inv alrm count from the newtransactidstr # newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr)", "# #delay # print('zzzzz') # time.sleep(180) #sleep for 3mins, increase this later #", "g.save_resp_json(firstresponse) # # Everything depends on count of this first item # p", "inventory for each tank in list # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get 
the latest", "above # nexttransactidstr = transactidstr # nextinvalrmcount = invalrmcount # # while more", "print('writing parsed alarm data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay # print('zzzzz') #", "a second gateway request using the TransactionID to create unique json file #", "1 and 3 above - need tank and inv # for item in", "print('TankIDs: ' + str(tanklist)) # for item in tanklist: #for each unique tank,", "unique json file # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4 -", "' + str(tanklist)) # for item in tanklist: #for each unique tank, create", "# print(tanklist) # for item in tanklist: #display latest inventory for each tank", "master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) # #step2 - build tank list", "tank # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has gross vol of", "GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS! 
#", "print('writing parsed inventory data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed alarm data", "inv # for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest", "# nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(3) # #now, check", "d['soap:Body']['GetOrganizationResponse']['GetOrganizationResult']['Organization'] #returns list # for k in list: # #print(type(k)) # #print(k) #", "INVCALCALARM # NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF YOU", "== 100: # time.sleep(3) # #replaces step3 # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse,", "# while newinvalrmcount == 100: # time.sleep(3) # #replaces step3 # uniquedictresponse =", "# def build_latest_inv_file(): # '''NEW TEST TO GET LATEST INV RECORDS - THIS", "tanklist = p.get_tank_list() #gives list of tank ids - THIS IS AN IMPORTANT", "get the new transaction id and count # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount", "dresp = g.parse_response(soapResponse) # print(dresp) # INV ALARM CALC TRANSACTIONID TESTS # #", "# #Loc example reading the list in Location value # print('Return code: '", "# time.sleep(3) # #now, check if latest unique json has no records, if", "+ ' gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) # # TEST 8 - full test working", "# soapResponse = g.gateway_request(soapreqs.get_tank_soap()) # soapResponse = g.gateway_request(soapreqs.get_inv_soap()) # soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) #", "#ELSE IF COUNT >= 100 --> NEED TO ITERATE THRU TO GET LATEST", "currently used for testing only # import pprint # import json # 
---------------------------------------------------------", "zero as ACK code), parse response and save json file # g =", "IMPORTANT STEP FOR SEVERAL ITEMS BELOW!!!!!!! # #print(tanklist) # for item in tanklist:", "= p.count_inventorycalcalrm_unique(testinvtransactid) # print('new count: ' + str(newinvalrmcount)) # #Step3 - make a", "+ str(thecount)) # #IF COUNT <= 0 --> NO NEW INV RECORDS #", "# print('Tank List: ') # list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list # for k", "+ str(newinvalrmcount)) # nexttransactidstr = newtransactidstr #updates nexttransactidstr # # NEW TEST TO", "' + str(k['iTankID'])) # except KeyError: # pass # --------------------------------------------------------- # ''' REAL", "transactidstr) # #Step4 - Now parse the unique json file to get the", "= p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var # print('NEW TransactionID:", "v in k.items(): # if k == 'iOrganizationID': # print(k, v) # #print(v)", "# deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename the unique inv json", "has alarm status of ' # + alarmstatus + ' calc alarm bits')", "only # import pprint # import json # --------------------------------------------------------- # ''' EARLY TEST", "WEB SERVICE WITH TRANSACTID, YOU CANNOT GET IT AGAIN! 
# #ELSE YOU HAVE", "v) # #print(v) # #Loc example reading the list in Location value #", "Process the json file to get the TransactionID and Inv Calc Alarm count", "# except KeyError: # pass # --------------------------------------------------------- # ''' REAL GATEWAY TEST SECTION", "alarm bits') # #TODO: Add function in Process to perform an alarm bits", "# g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) # #step2 - build tank list from file created", "# print(' NEW Inv Count: ' + str(newinvalrmcount)) # #update nexttransactid and nextinvalrmcount", "# #note: for this to work, you must have already done steps 1", "g.save_resp_unique_json(firstresponse, '_latest') # # #also get and save unique json reponse for the", "to perform an alarm bits lookup to decode the actual alarm state #", "OF GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS!'''", "tanklist get latest inventory and display # #note: for this to work, you", "count # p = gateway.Process() # transactidstr = p.get_inventorycalcalrm_transactID() # invalrmcount = p.count_inventorycalcalrm()", "json reponse for the next transactid - IMPORTANT: THIS WILL GIVE AN EMPTY", "need to iterate to latest') # #set transactid and count to first one", "SETUP RUN TEST TO CHECK FOR CHANGES VIA GATEWAY # # TODO: Switch", "transactidstr # newinvalrmcount = invalrmcount # while newinvalrmcount == 100: # time.sleep(3) #", "SOULD ALREADY EXIST # print('Zero new inventory records, use the existing latest') #", "NEED TO ITERATE THRU TO GET LATEST # #MUST MAKE SURE YOU SAVE", "gateway.Process() #test1 # tanklist = p.get_tank_list() # for item in tanklist: # print(item)", "the new transaction id and count # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(transactidstr) # newinvalrmcount =", "# # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, 
transactidstr) # transactidstr = p.get_inventorycalcalrm_transactID()", "need tank and inv # for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item))", "print('Created unique json for TransactionID ' + nexttransactidstr) # #replaces step4 # newinvalrmcount", "# except: # logtxt = 'error' # return logtxt # # TEST 9", "alarm bits lookup to decode the actual alarm state # #RUN.PY TEST #", "GET THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY LATEST IF PRESENT AT END!", "' + nexttransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount)) # nexttransactidstr", "TEST SECTION ONLY # p = gateway.Process() #test1 # tanklist = p.get_tank_list() #", "IF YOU HAVE LESS THAN 100 TANKS! # #TODO: Place thi ALL into", "alarmstatus + ' calc alarm bits') # #TODO: Add function in Process to", "JSON! ONCE YOU CALL THE WEB SERVICE WITH TRANSACTID, YOU CANNOT GET IT", "step3 # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created unique json for", "# tanklist = p.get_tank_list() # for item in tanklist: # print(item) #test2 #", "# dresp = g.parse_response(soapResponse) # print(dresp) # INV ALARM CALC TRANSACTIONID TESTS #", "# print('ID: ' + str(k['iLocationID']) + ' Name: ' + str(k['sLocationName']) # +", "item) # time.sleep(1) # #step3 - get latest inv and save file #", "build_latest_inv_file(): # '''NEW TEST TO GET LATEST INV RECORDS - THIS PROCESS GIVES", "latest # g.save_resp_unique_json(newuniquedictresponse, '_latest') # else: # print('Less than 100') # #save as", "100 --> NEED TO ITERATE THRU TO GET LATEST # #MUST MAKE SURE", "the new inv alrm count from the newtransactidstr # newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) #", "# #save next to last id string in case last item has zero", "# nexttransactidstr = transactidstr # # while more to get, set new transactid", "# 
'''NEW TEST TO GET LATEST INV RECORDS - THIS PROCESS GIVES YOU", "= datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y %I:%M:%S %p') # print(str(item['sUTCInventoryTime'])) # # if item['iCalcAlarmBits']", "p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) # #now, check if latest unique", "+ nexttransactidstr) # #replaces step4 # newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount # newtransactidstr", "print('NEW TransactionID: ' + nexttransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount))", "# invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get and save", "print('TransactionID: ' + transactidstr + ' Inv Count: ' + str(invalrmcount)) # time.sleep(2)", "100 # if nextinvalrmcount < 100: # break # print('fetching next...') # newtransactidstr", "tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(1) # print('retrieved tanks...') # #step2 - build", "print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item in tanklist: # print(item) #need", "# #MUST MAKE SURE YOU SAVE EACH UNIQUE JSON! 
ONCE YOU CALL THE", "# print('NEW TransactionID: ' + newtransactidstr) # #get the next unique json from", "# #get and save unique json reponse # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse,", "#gives list of tank ids - THIS IS AN IMPORTANT STEP FOR SEVERAL", "#Org example reading the list in Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode']) #", "HAVE THE LATEST INV IN GetInventoryCalcAlarmResponse.json, SAVE TO LATEST # if thecount <=", "OF GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS THAN 100 TANKS!", "#RUN.PY TEST # # SETUP RUN TEST TO CHECK FOR CHANGES VIA GATEWAY", "create unique json file # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4", "p.get_latestinvid_bytank('10203647') #works! # print(latestinvstr) #test6 - nice working test! 
# tanklist = p.get_tank_list()", "# # SETUP RUN TEST TO CHECK FOR CHANGES VIA GATEWAY # #", "status ' + item['iCalcAlarmBits']) # f = open('temp.json', 'w') # f.write(json.dumps(resp, sort_keys=True, indent=4))", "str(d['soap:Body']['GetTankResponse']['iErrorCode'])) # print('Tank List: ') # list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list # for", "# #get the new inv alrm count from the newtransactidstr # newinvalrmcount =", "' + str(newinvalrmcount)) # nexttransactidstr = newtransactidstr #updates nexttransactidstr # # NEW TEST", "zero records # nexttolastidstr = nexttransactidstr # #break while loop if count less", "new inv alarm count from the uniquedictresponse # invalrmcount = p.count_inventorycalcalrm_unique(transactidstr) # print('", "str(k['iLocationID']) + ' Name: ' + str(k['sLocationName']) # + ' Address: ' +", "INV IN GetInventoryCalcAlarmResponse.json, SAVE TO LATEST # if thecount <= 0: # #No", "# if thecount <= 0: # #No new inv, Use latest unique -", "to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed alarm data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())))", "# #transactidstr = p.get_inventorycalcalrm_transactID() # #invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr)", "json for TransactionID ' + nexttransactidstr) # #replaces step4 # newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr)", "file # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #Step4 - Now parse", "request # newuniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(newtransactidstr))) # g.save_resp_unique_json(newuniquedictresponse, newtransactidstr) # #get the new inv", "ACK code), parse response and save json file # g = 
gateway.Gateway() #", "try: # logtxt = '' # g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))", "save json file # g = gateway.Gateway() # dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works", "invalrmcount == 100: # print('more than 100, need to iterate to latest') #", "# g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3 - get latest inv and save", "indent=4)) # for k in d['soap:Body']: # print(k) # break # d =", "Name: ' + str(k['sLocationName']) # + ' Address: ' + str(k['sAddress1'])) # except", "req all tanks and write to master tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2)", "# g.save_resp_unique_json(newuniquedictresponse, 'latest') # else: # print('less than 100, have latest') # g.save_resp_unique_json(uniquedictresponse,", "print('Return code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List: ') # list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location']", "thru step 4 - fully working # g = gateway.Gateway() # p =", "in list: # try: # if k['iTankID']: # print('ID: ' + str(k['iTankID'])) #", "else: # print('Less than 100') # #save as latest inv json file #", "List: ') # list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list # for k in list:", "# #Step3 - make a second gateway request using the TransactionID to create", "gateway.Process() # transactidstr = p.get_inventorycalcalrm_transactID() # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' +", "# #also get and save unique json reponse for the next transactid -", "based on count # g = gateway.Gateway() # p = gateway.Process() # #step1", "already done steps 1 and 3 above - need tank and inv #", "ALSO DEL EMPTY LATEST IF PRESENT 
AT END! # print('more than 100, need", "') # list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list # for k in list: #", "print('Tank List: ') # list = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] #returns list # for k in", "latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank # print('Tank", "if item['iCalcAlarmBits'] != str(0): # # print('Tank ' + item['iTankID'] + ' has", "display # #note: for this to work, you must have already done steps", "# print('TransactionID: ' + transactidstr + ' Inv Count: ' + str(invalrmcount)) #", "transactidstr) # except: # logtxt = 'error' # return logtxt # # TEST", "the data # for item in tanklist: # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the", "p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get and save unique json reponse", "to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay # print('zzzzz') # time.sleep(180) #sleep for 3mins,", "#Imports currently used for testing only # import pprint # import json #", "p.get_inventorycalcalrm_transactID() # #invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get and", "COMPARE EMPTY GetInventoryCalcAlarmResponse.json # #FILE TO THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD ALREADY", "uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get the new inv alarm count", "get the latest inventory # nexttransactidstr = transactidstr # newinvalrmcount = invalrmcount #", "# print('Tank ' + item['iTankID'] + ' has alarm status ' + item['iCalcAlarmBits'])", "tank ids - THIS IS AN IMPORTANT STEP FOR SEVERAL ITEMS BELOW!!!!!!! 
#", "full test working thru step 4 - fully working # g = gateway.Gateway()", "if so delete it # if len(nexttolastidstr) > 0 and newinvalrmcount < 1:", "# nexttolastidstr = '' # newuniquedictresponse = [] # if invalrmcount == 100:", "to decode the actual alarm state # #RUN.PY TEST # # SETUP RUN", "print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has alarm status of ' #", "NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF YOU HAVE LESS", "'' # newuniquedictresponse = [] # if invalrmcount == 100: # print('more than", "string in case last item has zero records # nexttolastidstr = nexttransactidstr #", "+ str(invalrmcount)) # #determine inv count - if less than 100, nothing more", "and display # #note: for this to work, you must have already done", "# # if item['iCalcAlarmBits'] != str(0): # # print('Tank ' + item['iTankID'] +", "get latest inventory and display # #note: for this to work, you must", "p.get_tank_list() #gives list of tank ids # print('TankIDs: ' + str(tanklist)) # for", "g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item) # time.sleep(1) # #step3 - get latest inv and save file", "+ str(tanklist)) # for item in tanklist: #for each unique tank, create a", "id for the tank # alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus != '0':", "point GetInventoryCalcAlarmResponselatest json file! 
# if len(str(newuniquedictresponse)) > 0: # g.save_resp_unique_json(newuniquedictresponse, 'latest') #", "to that from latest unique json # while True: # #save next to", "# print(' NEW Inv Count: ' + str(invalrmcount)) # #set transactid and count", "NEW TEST TO GET LATEST INV RECORDS - THIS PROCESS GIVES YOU LATEST", "EMPTY GetInventoryCalcAlarmResponse.json # #FILE TO THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT SOULD ALREADY EXIST", "inv json file # g.save_resp_unique_json(firstresponse, '_latest') # # #also get and save unique", "' + str(k['sLocationName']) # + ' Address: ' + str(k['sAddress1'])) # except KeyError:", "for item in bothlist: # print(item) #test4 # print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr =", "- fully working # g = gateway.Gateway() # p = gateway.Process() # #step1", "'iOrganizationID': # print(k, v) # #print(v) # #Loc example reading the list in", "inv count - if less than 100, nothing more to do # nexttolastidstr", "print(' NEW Inv Count: ' + str(invalrmcount)) # #set transactid and count to", "inventory json file. 
# g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse)", "#TODO: Place thi ALL into a function that whose job is to basically", "if invalrmcount == 100: # print('more than 100, need to iterate to latest')", "# time.sleep(180) #sleep for 3mins, increase this later # def build_latest_inv_file(): # '''NEW", "parsed inventory data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed alarm data to", "the latest inventory # nexttransactidstr = transactidstr # newinvalrmcount = invalrmcount # while", "the TransactionID to create unique json - first test # testinvtransactid = '47174434'", "' NEW Inv Count: ' + str(newinvalrmcount)) # #Step 5- Repeat as neccessary", "work, you must have already done steps 1 and 3 above - need", "# --------------------------------------------------------- # ''' EARLY TEST SCENARIOS ''' # --------------------------------------------------------- # # invalrmlist", "' + item['iTankID'] + ' has alarm status ' + item['iCalcAlarmBits']) # f", "for TransactionID ' + nexttransactidstr) # #replaces step4 # newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates", "str(item) + ' currently has gross vol ' + p.get_grossvol_byinvid(latestinvidstr) + ' gals')", "d['soap:Body']['GetInventoryCalcAlarmResponse']['GetInventoryCalcAlarmResult']['CalcAlarmInventory'] # inventorytime = '' # for item in invalrmlist: # if item['sUTCInventoryTime']:", "= p.get_inventorycalcalrm_transactID() # #invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get", "LATEST UNIQUE JSON FILE FOR INV RECORDS # #ELSE IF COUNT >= 100", "if count less than 100 # if nextinvalrmcount < 100: # break #", "stmts to log statements # print('\\nWELCOME TO THE GATEWAY DEMO APP\\n--------------------------------') # g", "of tank ids # print(tanklist) # for item in tanklist: #display 
latest inventory", "# for item in invalrmlist: # if item['sUTCInventoryTime']: # #datetime_object = datetime.strptime(str(item['sUTCInventoryTime']), '%m", "for using latest inv above based on count # g = gateway.Gateway() #", "transactid to that from latest unique json # while True: # #save next", "+ ' NEW Inv Count: ' + str(newinvalrmcount)) # #Step 5- Repeat as", "#save next to last id string in case last item has zero records", "item has zero records # nexttolastidstr = nexttransactidstr # #break while loop if", "as neccessary until count < 100 to get the latest inventory # nexttransactidstr", "TO LATEST # if thecount <= 0: # #No new inv, Use latest", "in list: # try: # if k['iLocationID']: # print('ID: ' + str(k['iLocationID']) +", "# time.sleep(1) # #step3 - get latest inv and save file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap())))", "time.sleep(180) #sleep for 3mins, increase this later # def build_latest_inv_file(): # '''NEW TEST", "= transactidstr # newinvalrmcount = invalrmcount # while newinvalrmcount == 100: # time.sleep(3)", "#test6 - nice working test! 
# tanklist = p.get_tank_list() #gives list of tank", "- BASICALLY THIS MEANS NEED TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json # #FILE TO THE", "currently has gross vol ' + p.get_grossvol_byinvid(latestinvidstr) + ' gals') #test7 #print(str(p.get_tankname_bytankid('10203647'))) #", "transactidstr) # #get and save unique json reponse # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) #", "RECORDS # #ELSE IF COUNT >= 100 --> NEED TO ITERATE THRU TO", "= 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save the latest non-empty unique inv json", "transactidstr # # while more to get, set new transactid to that from", "g.save_resp_unique_json(newuniquedictresponse, 'latest') # else: # print('less than 100, have latest') # g.save_resp_unique_json(uniquedictresponse, 'latest')", "list # for k in list: # try: # if k['iLocationID']: # print('ID:", "#test4 # print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr = p.get_latestinvid_bytank('10203647') #works! # print(latestinvstr) #test6 -", "Request to Gateway # soapResponse = g.gateway_request(soapreqs.get_org_soap()) # soapResponse = g.gateway_request(soapreqs.get_loc_soap()) # soapResponse", "# #TODO: Place thi ALL into a function that whose job is to", "currently has gross vol of ' # + str(int(float(p.get_grossvol_byinvid(latestinvidstr)))) + ' gals') #", "LESS THAN 100 TANKS!''' # try: # logtxt = '' # g =", "' + str(invalrmcount)) # time.sleep(2) #wait 2 secs # #Step2.5 - make a", "NEW Inv Count: ' + str(invalrmcount)) # #determine inv count - if less", "# if k == 'iOrganizationID': # print(k, v) # #print(v) # #Loc example", "STEP FOR SEVERAL ITEMS BELOW!!!!!!! 
# #print(tanklist) # for item in tanklist: #for", "= '' # g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) #", "actual alarm state # #RUN.PY TEST # # SETUP RUN TEST TO CHECK", "print('new count: ' + str(newinvalrmcount)) # #Step3 - make a second gateway request", "= g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) # g.save_resp_unique_json(uniquedictresponse, nexttransactidstr) # print('Created unique json for TransactionID ' +", "= invalrmcount # while newinvalrmcount == 100: # time.sleep(3) # #replaces step3 #", "json has no records, if so delete it # if len(nexttolastidstr) > 0", "REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # transactidstr =", "now, similar to step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 - parse and display", "TO GET THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY LATEST IF PRESENT AT", "count < 100 to get the latest inventory # nexttransactidstr = transactidstr #", "generic starting point GetInventoryCalcAlarmResponselatest json file! 
# if len(str(newuniquedictresponse)) > 0: # g.save_resp_unique_json(newuniquedictresponse,", "# #set transactid and count to first one above # #nextinvalrmcount = invalrmcount", "#Step2.5 - make a second gateway req using the TransactionID to create unique", "save file # print('writing parsed inventory data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing", "# for k in d['soap:Body']: # print(k) # break # d = {'ONE':{'TWO':{'THREE':'some", "UNIQUE INVCALCALARM # NOTE: THIS METHOD OF GETTING LATEST INVENTORY ONLY WORKS IF", "up...') # #step1 - request all tanks and write to master tanks file", "' + p.get_tankname_bytankid_file(str(item)) + ' currently has gross vol of ' # +", "# newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW Inv Count: ' + str(newinvalrmcount)) #", "transactidstr) # #get the new inv alarm count from the uniquedictresponse # invalrmcount", "# # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr) # #get and", "# try: # logtxt = '' # g = gateway.Gateway() # firstresponse =", "IT AGAIN! # #ELSE YOU HAVE THE LATEST INV IN GetInventoryCalcAlarmResponse.json, SAVE TO", "# print('writing parsed inventory data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed alarm", "Count: ' + str(newinvalrmcount)) # nexttransactidstr = newtransactidstr #updates nexttransactidstr # # NEW", "# # TODO: Switch print stmts to log statements # print('\\nWELCOME TO THE", "str(k['sAddress1'])) # except KeyError: # pass # #Tank example reading the list in", "test! 
# tanklist = p.get_tank_list() #gives list of tank ids # print(tanklist) #", "d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list # for k in list: # try: # if k['iLocationID']:", "in invalrmlist: # if item['sUTCInventoryTime']: # #datetime_object = datetime.strptime(str(item['sUTCInventoryTime']), '%m %d %Y %I:%M:%S", "#need to fix # #Org example reading the list in Organization value #", "' + str(invalrmcount)) # #set transactid and count to first one above #", "print(latestinvstr) #test6 - nice working test! # tanklist = p.get_tank_list() #gives list of", ">= 100: # #ITERATE TO GET THE LATEST INVENTORY GetInventoryCalcAlarmResponse_latest.json; ALSO DEL EMPTY", "# for item in tanklist: #for each unique tank, create a unique json", "nexttransactidstr # #break while loop if count less than 100 # if nextinvalrmcount", "WORKS IF YOU HAVE LESS THAN 100 TANKS! # #TODO: Place thi ALL", "# print('less than 100, have latest') # g.save_resp_unique_json(uniquedictresponse, 'latest') # PROCESSING TEST SECTION", "p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var # print('NEW TransactionID: ' + nexttransactidstr + ' NEW Inv", "# #Org example reading the list in Organization value # print(d['soap:Body']['GetOrganizationResponse']['@xmlns']) # print(d['soap:Body']['GetOrganizationResponse']['iErrorCode'])", "wake up...') # #step1 - request all tanks and write to master tanks", "p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr + ' Inv Count: ' + str(invalrmcount))", "list: # try: # if k['iTankID']: # print('ID: ' + str(k['iTankID'])) # except", "if alarmstatus != '0': # print('Tank ' + p.get_tankname_bytankid_file(str(item)) + ' currently has", "LATEST INV IN GetInventoryCalcAlarmResponse.json, SAVE TO LATEST # if thecount <= 0: #", "Count: ' + str(invalrmcount)) # #set transactid and count to first one above", "YOU HAVE LESS THAN 100 TANKS!''' # try: # logtxt = '' #", "p.get_tank_list() # for item in 
tanklist: # print(item) #test2 # invlist = p.get_inventory_list()", "- Process the json file to get the TransactionID and Inv Calc Alarm", "new inv, Use latest unique - BASICALLY THIS MEANS NEED TO COMPARE EMPTY", "+ ' calc alarm bits') # #TODO: Add function in Process to perform", "var # print('NEW TransactionID: ' + nexttransactidstr + ' NEW Inv Count: '", "item in tanklist: #for each unique tank, create a unique json file for", "' gals') # #step5 - works now, similar to step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())))", "# print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist = d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item in tanklist: # print(item)", "list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns list # for k in list: # try: #", "#IF COUNT <= 0 --> NO NEW INV RECORDS # #MUST USE LATEST", "# soapResponse = g.gateway_request(soapreqs.get_org_soap()) # soapResponse = g.gateway_request(soapreqs.get_loc_soap()) # soapResponse = g.gateway_request(soapreqs.get_tank_soap()) #", "= p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus != '0': # print('Tank ' + p.get_tankname_bytankid_file(str(item)) +", "# #TODO: Add function in Process to perform an alarm bits lookup to", "= gateway.Process() # thecount = p.count_inventorycalcalrm() # transactidstr = p.get_inventorycalcalrm_transactID() # print('TransactID: '", "latest inv above based on count # g = gateway.Gateway() # p =", "Alarm count # p = gateway.Process() # transactidstr = p.get_inventorycalcalrm_transactID() # invalrmcount =", "- make request using simple inventory soap (ie. 
zero as ACK code), parse", "RECORDS - THIS PROCESS GIVES YOU LATEST UNIQUE INVCALCALARM # NOTE: THIS METHOD", "# transactidstr = p.get_inventorycalcalrm_transactID() # invalrmcount = p.count_inventorycalcalrm() # print('TransactionID: ' + transactidstr", "Address: ' + str(k['sAddress1'])) # except KeyError: # pass # #Tank example reading", "latestinvstr = p.get_latestinvid_bytank('10203647') #works! # print(latestinvstr) #test6 - nice working test! # tanklist", "g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, save the latest non-empty unique inv json file to the", "3 above - need tank and inv # for item in tanklist: #", "#print(v) # #Loc example reading the list in Location value # print('Return code:", "# d = {'ONE':{'TWO':{'THREE':'some txt value'}}} # pprint.pprint(d) # print(d['ONE']) # print(d['ONE']['TWO']) #", "value'}}} # pprint.pprint(d) # print(d['ONE']) # print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist", "json reponse # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # g.save_resp_unique_json(uniquedictresponse, transactidstr) # #get the new", "p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: ' + str(invalrmcount)) # #determine inv count", "id for the tank # print('TankID: ' + str(item) + ' currently has", "from datetime import datetime #Imports currently used for testing only # import pprint", "tanklist: # print(item) #test2 # invlist = p.get_inventory_list() # for item in invlist:", "step 4 - fully working # g = gateway.Gateway() # p = gateway.Process()", "invalrmcount # while newinvalrmcount == 100: # time.sleep(3) # #replaces step3 # uniquedictresponse", "dictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) #soapreqs.get_invalrm_transactid_soap('0') works the same # # Step2 - Process the", "uniquedictresponse = 
g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # # g.save_resp_unique_json(firstresponse, transactidstr) # except: # logtxt = 'error'", "list # for k in list: # try: # if k['iTankID']: # print('ID:", "nexttolastidstr = nexttransactidstr # #break while loop if count less than 100 #", "# nextinvalrmcount = thecount # nexttransactidstr = transactidstr # # while more to", "# #now, check if latest unique json has no records, if so delete", "json file. # g = gateway.Gateway() # firstresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap())) # g.save_resp_json(firstresponse) #", "# # TEST 8 - full test working thru step 4 - fully", "= gateway.Gateway() # p = gateway.Process() # #step1 - req all tanks and", "each unique tank, create a unique file for each tank # g.save_resp_unique_json(g.parse_response(g.gateway_request(soapreqs.get_tankgenlatlon_soap(item))), item)", "import soapreqs import time from datetime import datetime #Imports currently used for testing", "deletresponsestr = 'data/GetInventoryCalcAlarmResponse{0}.json' # g.delete_resp_unique_json(deletresponsestr.format(nexttransactidstr)) # #finally, rename the unique inv json file", "list # latestinvidstr = p.get_latestinvid_bytank(str(item)) #get the latest inventory id for the tank", "for item in tanklist: #display latest inventory for each tank in list #", "file to get the TransactionID and Inv Calc Alarm count # p =", "# latestinvstr = p.get_latestinvid_bytank('10203647') #works! # print(latestinvstr) #test6 - nice working test! 
#", "= g.gateway_request(soapreqs.get_inv_soap()) # soapResponse = g.gateway_request(soapreqs.get_invalrm_soap()) # tankgenlatlonstr = '10203647' # soapResponse =", "newinvalrmcount == 100: # time.sleep(3) # #replaces step3 # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(nexttransactidstr))) #", "inv and save file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # #step4 - for each tank in", "CALC TRANSACTIONID TESTS # # Step1 - make request using simple inventory soap", "#step5 - works now, similar to step 4 # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #step7 -", "code: ' + str(d['soap:Body']['GetLocationResponse']['iErrorCode'])) # print('Location List: ') # list = d['soap:Body']['GetLocationResponse']['GetLocationResult']['Location'] #returns", "# print('TransactID: ' + transactidstr) # print('Inventory count: ' + str(thecount)) # #IF", "SEVERAL ITEMS BELOW!!!!!!! # #print(tanklist) # for item in tanklist: #for each unique", "NEED TO COMPARE EMPTY GetInventoryCalcAlarmResponse.json # #FILE TO THE LATEST GetInventoryCalcAlarmResponse_latest.json INVENTORY THAT", "= [] # if invalrmcount == 100: # print('more than 100, need to", "= p.get_latestinvid_bytank('10203647') #works! # print(latestinvstr) #test6 - nice working test! 
# tanklist =", "nexttransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) # nextinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) # time.sleep(2) # #now, check if", "inv alrm count from the newtransactidstr # newinvalrmcount = p.count_inventorycalcalrm_unique(newtransactidstr) # print(' NEW", "# return logtxt # # TEST 9 - modified test #8 for using", "print('more than 100, need to iterate to latest') # #transactidstr = p.get_inventorycalcalrm_transactID() #", "str(invalrmcount)) # #set transactid and count to first one above # #nextinvalrmcount =", "'error' # return logtxt # # TEST 9 - modified test #8 for", "newtransactidstr + ' NEW Inv Count: ' + str(newinvalrmcount)) # #Step 5- Repeat", "unique json has no records, if so delete it # if len(nexttolastidstr) >", "WILL GIVE AN EMPTY NEXT REPONSE # # uniquedictresponse = g.parse_response(g.gateway_request(soapreqs.get_invalrm_transactid_soap(transactidstr))) # #", "# for item in bothlist: # print(item) #test4 # print(p.get_grossvol_byinvid('194699940')) #test5 # latestinvstr", "!= str(0): # # print('Tank ' + item['iTankID'] + ' has alarm status", "for k in list: # try: # if k['iTankID']: # print('ID: ' +", "100 to get the latest inventory # nexttransactidstr = transactidstr # newinvalrmcount =", "the tank # alarmstatus = p.get_tankalrm_byinvid(latestinvidstr) # if alarmstatus != '0': # print('Tank", "tanks file # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_tank_soap()))) # time.sleep(2) # #step2 - build tank list from", "nexttransactidstr) # print('Created unique json for TransactionID ' + nexttransactidstr) # #replaces step4", "str(k['iTankID'])) # except KeyError: # pass # --------------------------------------------------------- # ''' REAL GATEWAY TEST", "d['soap:Body']['GetTankResponse']['GetTankResult']['Tank'] # for item in tanklist: # print(item) #need to fix # #Org", "nexttransactidstr = newtransactidstr #updates nexttransactidstr # # NEW TEST TO 
GET LATEST INV", "= p.count_inventorycalcalrm_unique(transactidstr) # print(' NEW Inv Count: ' + str(invalrmcount)) # #determine inv", "= p.get_inventory_list() # for item in invlist: # print(item) #test3 # bothlist =", "# pprint.pprint(d) # print(d['ONE']) # print(d['ONE']['TWO']) # print(d['soap:Body']['GetTankResponse']['@xmlns']) # print(d['soap:Body']['GetTankResponse']['iErrorCode']) # tanklist =", "#Step4 - Now parse the unique json file to get the new transaction", "newinvalrmcount = p.count_inventorycalcalrm_unique(nexttransactidstr) #updates newinvalrmcount # newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var # print('NEW", "# #RUN.PY TEST # # SETUP RUN TEST TO CHECK FOR CHANGES VIA", "Inv Count: ' + str(invalrmcount)) # time.sleep(2) #wait 2 secs # #Step2.5 -", "# g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_inv_soap()))) # print('writing parsed alarm data to file...') # g.save_resp_json(g.parse_response(g.gateway_request(soapreqs.get_invalrm_soap()))) # #delay", "LATEST IF PRESENT AT END! # print('more than 100, need to iterate to", "item['iTankID'] + ' has alarm status ' + item['iCalcAlarmBits']) # f = open('temp.json',", "of tank ids - THIS IS AN IMPORTANT STEP FOR SEVERAL ITEMS BELOW!!!!!!!", "print(' NEW Inv Count: ' + str(newinvalrmcount)) # #update nexttransactid and nextinvalrmcount #", "# g.save_resp_unique_json(uniquedictresponse, 'latest') # PROCESSING TEST SECTION ONLY # p = gateway.Process() #test1", "for the tank # print('TankID: ' + str(item) + ' currently has gross", "# newtransactidstr = p.get_inventorycalcalrm_unique_transactID(nexttransactidstr) #temp var # print('NEW TransactionID: ' + nexttransactidstr +", "print('Tank ' + item['iTankID'] + ' has alarm status ' + item['iCalcAlarmBits']) #", "- if less than 100, nothing more to do # nexttolastidstr = ''" ]
[ "Python :: 3' 'Programming Language :: Python :: 3.7' 'Programming Language :: Python", "_get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'],", "package_info) return package_info _PACKAGE_INFO = _get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']),", "as fh: return [line.strip() for line in fh.readlines()] def _get_readme(): with open(_build_path(file_path='README.md')) as", "Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python", "with open(_build_path(file_path='requirements/prod.txt')) as fh: return [line.strip() for line in fh.readlines()] def _get_readme(): with", "def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info = {} exec(fh.read(), package_info) return package_info", "Language :: Python :: 3' 'Programming Language :: Python :: 3.7' 'Programming Language", "fh.readlines()] def _get_readme(): with open(_build_path(file_path='README.md')) as fh: return fh.read() def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py'))", "_get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info = {} exec(fh.read(), package_info) return package_info _PACKAGE_INFO", "package_info _PACKAGE_INFO = _get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), 
packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'],", "packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language ::", "return fh.read() def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info = {} exec(fh.read(), package_info)", "'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language :: Python", "with open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info = {} exec(fh.read(), package_info) return package_info _PACKAGE_INFO =", "License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language :: Python :: 3' 'Programming Language", "fh: return fh.read() def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info = {} exec(fh.read(),", "[line.strip() for line in fh.readlines()] def _get_readme(): with open(_build_path(file_path='README.md')) as fh: return fh.read()", ":: 3.7' 'Programming Language :: Python :: 3.8', 'Programming Language :: Python ::", "as fh: package_info = {} exec(fh.read(), package_info) return package_info _PACKAGE_INFO = _get_package_info() setuptools.setup(", "open(_build_path(file_path='README.md')) as fh: return fh.read() def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info =", "os.path.join(base, file_path) def _get_dependencies(): 
with open(_build_path(file_path='requirements/prod.txt')) as fh: return [line.strip() for line in", "fh: return [line.strip() for line in fh.readlines()] def _get_readme(): with open(_build_path(file_path='README.md')) as fh:", "file_path) def _get_dependencies(): with open(_build_path(file_path='requirements/prod.txt')) as fh: return [line.strip() for line in fh.readlines()]", "'Programming Language :: Python :: 3' 'Programming Language :: Python :: 3.7' 'Programming", "author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language :: Python :: 3' 'Programming Language ::", "open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info = {} exec(fh.read(), package_info) return package_info _PACKAGE_INFO = _get_package_info()", "Language :: Python :: 3.7' 'Programming Language :: Python :: 3.8', 'Programming Language", "def _get_readme(): with open(_build_path(file_path='README.md')) as fh: return fh.read() def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as", "with open(_build_path(file_path='README.md')) as fh: return fh.read() def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info", "fh.read() def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info = {} exec(fh.read(), package_info) return", "long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language", "Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', ], )", "= {} exec(fh.read(), package_info) return package_info _PACKAGE_INFO = _get_package_info() setuptools.setup( 
name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'],", "_get_dependencies(): with open(_build_path(file_path='requirements/prod.txt')) as fh: return [line.strip() for line in fh.readlines()] def _get_readme():", "line in fh.readlines()] def _get_readme(): with open(_build_path(file_path='README.md')) as fh: return fh.read() def _get_package_info():", "3' 'Programming Language :: Python :: 3.7' 'Programming Language :: Python :: 3.8',", "fh: package_info = {} exec(fh.read(), package_info) return package_info _PACKAGE_INFO = _get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'],", "Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language", "install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language :: Python ::", "setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'],", ":: 3' 'Programming Language :: Python :: 3.7' 'Programming Language :: Python ::", "license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language :: Python :: 3' 'Programming", "setuptools def _build_path(file_path, base=os.path.abspath(os.path.dirname(__file__))): return os.path.join(base, file_path) def _get_dependencies(): with open(_build_path(file_path='requirements/prod.txt')) as fh:", "base=os.path.abspath(os.path.dirname(__file__))): 
return os.path.join(base, file_path) def _get_dependencies(): with open(_build_path(file_path='requirements/prod.txt')) as fh: return [line.strip() for", "_PACKAGE_INFO = _get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT", "in fh.readlines()] def _get_readme(): with open(_build_path(file_path='README.md')) as fh: return fh.read() def _get_package_info(): with", "import os import setuptools def _build_path(file_path, base=os.path.abspath(os.path.dirname(__file__))): return os.path.join(base, file_path) def _get_dependencies(): with", "Python :: 3.7' 'Programming Language :: Python :: 3.8', 'Programming Language :: Python", "name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'],", "maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language :: Python :: 3' 'Programming Language :: Python ::", "url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language :: Python :: 3'", "'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming", "classifiers=[ 'Programming Language :: Python :: 3' 'Programming Language :: Python :: 3.7'", "<reponame>vspaz/pclient import os import setuptools def _build_path(file_path, 
base=os.path.abspath(os.path.dirname(__file__))): return os.path.join(base, file_path) def _get_dependencies():", "def _get_dependencies(): with open(_build_path(file_path='requirements/prod.txt')) as fh: return [line.strip() for line in fh.readlines()] def", "version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[", "for line in fh.readlines()] def _get_readme(): with open(_build_path(file_path='README.md')) as fh: return fh.read() def", "3.7' 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9',", "os import setuptools def _build_path(file_path, base=os.path.abspath(os.path.dirname(__file__))): return os.path.join(base, file_path) def _get_dependencies(): with open(_build_path(file_path='requirements/prod.txt'))", "'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', ],", ":: Python :: 3.7' 'Programming Language :: Python :: 3.8', 'Programming Language ::", "author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming Language :: Python :: 3' 'Programming Language :: Python", "open(_build_path(file_path='requirements/prod.txt')) as fh: return [line.strip() for line in fh.readlines()] def _get_readme(): with open(_build_path(file_path='README.md'))", "_get_readme(): with open(_build_path(file_path='README.md')) as fh: return fh.read() def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as fh:", ":: Python :: 3' 'Programming Language :: Python :: 3.7' 'Programming Language ::", "_build_path(file_path, base=os.path.abspath(os.path.dirname(__file__))): return os.path.join(base, 
file_path) def _get_dependencies(): with open(_build_path(file_path='requirements/prod.txt')) as fh: return [line.strip()", ":: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python ::", "3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10',", "import setuptools def _build_path(file_path, base=os.path.abspath(os.path.dirname(__file__))): return os.path.join(base, file_path) def _get_dependencies(): with open(_build_path(file_path='requirements/prod.txt')) as", "exec(fh.read(), package_info) return package_info _PACKAGE_INFO = _get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests',", "def _build_path(file_path, base=os.path.abspath(os.path.dirname(__file__))): return os.path.join(base, file_path) def _get_dependencies(): with open(_build_path(file_path='requirements/prod.txt')) as fh: return", "'Programming Language :: Python :: 3.7' 'Programming Language :: Python :: 3.8', 'Programming", "return package_info _PACKAGE_INFO = _get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(),", "as fh: return fh.read() def _get_package_info(): with open(_build_path(file_path='pyclient/__version__.py')) as fh: package_info = {}", "= _get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License',", "return [line.strip() for line 
in fh.readlines()] def _get_readme(): with open(_build_path(file_path='README.md')) as fh: return", "return os.path.join(base, file_path) def _get_dependencies(): with open(_build_path(file_path='requirements/prod.txt')) as fh: return [line.strip() for line", ":: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language ::", "description=_PACKAGE_INFO['__description__'], long_description=_get_readme(), packages=setuptools.find_packages(exclude=['tests', 'requirements']), install_requires=_get_dependencies(), url=_PACKAGE_INFO['__url__'], license='MIT License', author=_PACKAGE_INFO['__author__'], author_email=_PACKAGE_INFO['__email__'], maintainer=_PACKAGE_INFO['__maintainer__'], classifiers=[ 'Programming", "package_info = {} exec(fh.read(), package_info) return package_info _PACKAGE_INFO = _get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'],", "{} exec(fh.read(), package_info) return package_info _PACKAGE_INFO = _get_package_info() setuptools.setup( name=_PACKAGE_INFO['__title__'], version=_PACKAGE_INFO['__version__'], description=_PACKAGE_INFO['__description__'], long_description=_get_readme()," ]
[ "<gh_stars>0 \"\"\"Hello world! A brief overview of the package should go here. \"\"\"" ]
[ "list(map(int, input().split(\" \"))) arr = [ ] count = count2 = 0 for", "1): if i%j == 0: count += 1 if count == 1: arr.append(i)", "+= 1 if count == 1: arr.append(i) count = 0 for i in", "[ ] count = count2 = 0 for i in range(n, m+1): for", "count2 = 0 for i in range(n, m+1): for j in range(1, i//2", "count == 1: arr.append(i) count = 0 for i in arr: if i", "j in range(1, i//2 + 1): if i%j == 0: count += 1", "arr = [ ] count = count2 = 0 for i in range(n,", "count += 1 if count == 1: arr.append(i) count = 0 for i", "= [ ] count = count2 = 0 for i in range(n, m+1):", "count = count2 = 0 for i in range(n, m+1): for j in", "+ 1): if i%j == 0: count += 1 if count == 1:", "== 1: arr.append(i) count = 0 for i in arr: if i +", "m = list(map(int, input().split(\" \"))) arr = [ ] count = count2 =", "1: arr.append(i) count = 0 for i in arr: if i + 6", "count = 0 for i in arr: if i + 6 in arr:", "range(1, i//2 + 1): if i%j == 0: count += 1 if count", "if count == 1: arr.append(i) count = 0 for i in arr: if", "m+1): for j in range(1, i//2 + 1): if i%j == 0: count", "in range(n, m+1): for j in range(1, i//2 + 1): if i%j ==", "arr.append(i) count = 0 for i in arr: if i + 6 in", "i in arr: if i + 6 in arr: count2 += 1 print(count2)", "i in range(n, m+1): for j in range(1, i//2 + 1): if i%j", "i%j == 0: count += 1 if count == 1: arr.append(i) count =", "for i in arr: if i + 6 in arr: count2 += 1", "== 0: count += 1 if count == 1: arr.append(i) count = 0", "in range(1, i//2 + 1): if i%j == 0: count += 1 if", "for i in range(n, m+1): for j in range(1, i//2 + 1): if", "\"))) arr = [ ] count = count2 = 0 for i in", "] count = count2 = 0 for i in range(n, m+1): for j", "= list(map(int, input().split(\" \"))) arr = [ ] count = count2 = 0", "0: count += 1 if count == 1: arr.append(i) count = 0 for", "0 for i in range(n, m+1): for j in range(1, i//2 + 1):", "if i%j == 0: count += 1 if count == 1: arr.append(i) count", "range(n, m+1): for j in range(1, i//2 + 1): if i%j == 0:", "1 if 
count == 1: arr.append(i) count = 0 for i in arr:", "= 0 for i in arr: if i + 6 in arr: count2", "for j in range(1, i//2 + 1): if i%j == 0: count +=", "= count2 = 0 for i in range(n, m+1): for j in range(1,", "input().split(\" \"))) arr = [ ] count = count2 = 0 for i", "0 for i in arr: if i + 6 in arr: count2 +=", "n, m = list(map(int, input().split(\" \"))) arr = [ ] count = count2", "i//2 + 1): if i%j == 0: count += 1 if count ==", "= 0 for i in range(n, m+1): for j in range(1, i//2 +" ]
[ "@SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs): _min, _max = kwargs['min'], kwargs['max'] x = (x +", "std = kwargs['mean'], kwargs['std'] x = x * std x = x +", "* x * (_max - _min) + _min return x @SCALER_REGISTRY.register() def standard_re_transform(x,", "x @SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs): mean, std = kwargs['mean'], kwargs['std'] x = x", "kwargs['min'], kwargs['max'] x = (x + 1.) / 2. x = 1. *", "_max = kwargs['min'], kwargs['max'] x = (x + 1.) / 2. x =", "basicts.utils.registry import SCALER_REGISTRY \"\"\" data normalization and re-normalization \"\"\" # ====================================== re-normalizations ======================================", "data normalization and re-normalization \"\"\" # ====================================== re-normalizations ====================================== # @SCALER_REGISTRY.register() def re_max_min_normalization(x,", "kwargs['std'] x = x * std x = x + mean return x", "x = (x + 1.) / 2. x = 1. * x *", "* (_max - _min) + _min return x @SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs): mean,", "- _min) + _min return x @SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs): mean, std =", "1. * x * (_max - _min) + _min return x @SCALER_REGISTRY.register() def", "return x # ====================================== normalizations ====================================== # # omitted to avoid redundancy, as", "= kwargs['min'], kwargs['max'] x = (x + 1.) / 2. 
x = 1.", "avoid redundancy, as they should only be used in data preprocessing in `scripts/data_preparation`", "# @SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs): _min, _max = kwargs['min'], kwargs['max'] x = (x", "import SCALER_REGISTRY \"\"\" data normalization and re-normalization \"\"\" # ====================================== re-normalizations ====================================== #", "and re-normalization \"\"\" # ====================================== re-normalizations ====================================== # @SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs): _min,", "\"\"\" # ====================================== re-normalizations ====================================== # @SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs): _min, _max =", "1.) / 2. x = 1. * x * (_max - _min) +", "normalizations ====================================== # # omitted to avoid redundancy, as they should only be", "normalization and re-normalization \"\"\" # ====================================== re-normalizations ====================================== # @SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs):", "# omitted to avoid redundancy, as they should only be used in data", "====================================== re-normalizations ====================================== # @SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs): _min, _max = kwargs['min'], kwargs['max']", "# ====================================== re-normalizations ====================================== # @SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs): _min, _max = kwargs['min'],", "x = 1. * x * (_max - _min) + _min return x", "x * (_max - _min) + _min return x @SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs):", "= 1. * x * (_max - _min) + _min return x @SCALER_REGISTRY.register()", "re_max_min_normalization(x, **kwargs): _min, _max = kwargs['min'], kwargs['max'] x = (x + 1.) 
/", "(x + 1.) / 2. x = 1. * x * (_max -", "====================================== normalizations ====================================== # # omitted to avoid redundancy, as they should only", "(_max - _min) + _min return x @SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs): mean, std", "# ====================================== normalizations ====================================== # # omitted to avoid redundancy, as they should", "+ 1.) / 2. x = 1. * x * (_max - _min)", "x * std x = x + mean return x # ====================================== normalizations", "x + mean return x # ====================================== normalizations ====================================== # # omitted to", "====================================== # # omitted to avoid redundancy, as they should only be used", "@SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs): mean, std = kwargs['mean'], kwargs['std'] x = x *", "**kwargs): _min, _max = kwargs['min'], kwargs['max'] x = (x + 1.) / 2.", "= kwargs['mean'], kwargs['std'] x = x * std x = x + mean", "omitted to avoid redundancy, as they should only be used in data preprocessing", "2. x = 1. * x * (_max - _min) + _min return", "return x @SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs): mean, std = kwargs['mean'], kwargs['std'] x =", "/ 2. x = 1. 
* x * (_max - _min) + _min", "re-normalizations ====================================== # @SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs): _min, _max = kwargs['min'], kwargs['max'] x", "_min return x @SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs): mean, std = kwargs['mean'], kwargs['std'] x", "# # omitted to avoid redundancy, as they should only be used in", "**kwargs): mean, std = kwargs['mean'], kwargs['std'] x = x * std x =", "to avoid redundancy, as they should only be used in data preprocessing in", "\"\"\" data normalization and re-normalization \"\"\" # ====================================== re-normalizations ====================================== # @SCALER_REGISTRY.register() def", "def re_max_min_normalization(x, **kwargs): _min, _max = kwargs['min'], kwargs['max'] x = (x + 1.)", "kwargs['max'] x = (x + 1.) / 2. x = 1. * x", "_min) + _min return x @SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs): mean, std = kwargs['mean'],", "= x * std x = x + mean return x # ======================================", "* std x = x + mean return x # ====================================== normalizations ======================================", "x # ====================================== normalizations ====================================== # # omitted to avoid redundancy, as they", "x = x * std x = x + mean return x #", "SCALER_REGISTRY \"\"\" data normalization and re-normalization \"\"\" # ====================================== re-normalizations ====================================== # @SCALER_REGISTRY.register()", "+ mean return x # ====================================== normalizations ====================================== # # omitted to avoid", "standard_re_transform(x, **kwargs): mean, std = kwargs['mean'], kwargs['std'] x = x * std x", "= x + mean return x # ====================================== normalizations ====================================== # # omitted", "====================================== # 
@SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs): _min, _max = kwargs['min'], kwargs['max'] x =", "= (x + 1.) / 2. x = 1. * x * (_max", "from basicts.utils.registry import SCALER_REGISTRY \"\"\" data normalization and re-normalization \"\"\" # ====================================== re-normalizations", "mean, std = kwargs['mean'], kwargs['std'] x = x * std x = x", "kwargs['mean'], kwargs['std'] x = x * std x = x + mean return", "mean return x # ====================================== normalizations ====================================== # # omitted to avoid redundancy,", "x = x + mean return x # ====================================== normalizations ====================================== # #", "std x = x + mean return x # ====================================== normalizations ====================================== #", "re-normalization \"\"\" # ====================================== re-normalizations ====================================== # @SCALER_REGISTRY.register() def re_max_min_normalization(x, **kwargs): _min, _max", "def standard_re_transform(x, **kwargs): mean, std = kwargs['mean'], kwargs['std'] x = x * std", "+ _min return x @SCALER_REGISTRY.register() def standard_re_transform(x, **kwargs): mean, std = kwargs['mean'], kwargs['std']", "_min, _max = kwargs['min'], kwargs['max'] x = (x + 1.) / 2. x" ]
[ "8}\") print(f\"{num} x {9:2} = {num * 9}\") print(f\"{num} x {10:2} = {num", "print(f\"{num} x {8:2} = {num * 8}\") print(f\"{num} x {9:2} = {num *", "print(f\"{num} x {4:2} = {num * 4}\") print(f\"{num} x {5:2} = {num *", "um número para a tabuada: \"))) print('-'*12) print(f\"{num} x {1:2} = {num *", "print(f\"{num} x {6:2} = {num * 6}\") print(f\"{num} x {7:2} = {num *", "{6:2} = {num * 6}\") print(f\"{num} x {7:2} = {num * 7}\") print(f\"{num}", "* 2}\") print(f\"{num} x {3:2} = {num * 3}\") print(f\"{num} x {4:2} =", "= {num * 5}\") print(f\"{num} x {6:2} = {num * 6}\") print(f\"{num} x", "* 1}\") print(f\"{num} x {2:2} = {num * 2}\") print(f\"{num} x {3:2} =", "* 7}\") print(f\"{num} x {8:2} = {num * 8}\") print(f\"{num} x {9:2} =", "{2:2} = {num * 2}\") print(f\"{num} x {3:2} = {num * 3}\") print(f\"{num}", "* 5}\") print(f\"{num} x {6:2} = {num * 6}\") print(f\"{num} x {7:2} =", "* 4}\") print(f\"{num} x {5:2} = {num * 5}\") print(f\"{num} x {6:2} =", "* 6}\") print(f\"{num} x {7:2} = {num * 7}\") print(f\"{num} x {8:2} =", "4}\") print(f\"{num} x {5:2} = {num * 5}\") print(f\"{num} x {6:2} = {num", "x {6:2} = {num * 6}\") print(f\"{num} x {7:2} = {num * 7}\")", "{9:2} = {num * 9}\") print(f\"{num} x {10:2} = {num * 10}\") print(\"-\"*12)", "{num * 7}\") print(f\"{num} x {8:2} = {num * 8}\") print(f\"{num} x {9:2}", "print(f\"{num} x {9:2} = {num * 9}\") print(f\"{num} x {10:2} = {num *", "= {num * 7}\") print(f\"{num} x {8:2} = {num * 8}\") print(f\"{num} x", "= {num * 6}\") print(f\"{num} x {7:2} = {num * 7}\") print(f\"{num} x", "{3:2} = {num * 3}\") print(f\"{num} x {4:2} = {num * 4}\") print(f\"{num}", "x {4:2} = {num * 4}\") print(f\"{num} x {5:2} = {num * 5}\")", "\"))) print('-'*12) print(f\"{num} x {1:2} = {num * 1}\") print(f\"{num} x {2:2} =", "x {7:2} = {num * 7}\") print(f\"{num} x {8:2} = {num * 8}\")", "num = (int(input(\"Digite um número para a tabuada: \"))) print('-'*12) print(f\"{num} x {1:2}", "= {num * 4}\") print(f\"{num} x {5:2} = 
{num * 5}\") print(f\"{num} x", "print('-'*12) print(f\"{num} x {1:2} = {num * 1}\") print(f\"{num} x {2:2} = {num", "print(f\"{num} x {3:2} = {num * 3}\") print(f\"{num} x {4:2} = {num *", "x {2:2} = {num * 2}\") print(f\"{num} x {3:2} = {num * 3}\")", "= {num * 1}\") print(f\"{num} x {2:2} = {num * 2}\") print(f\"{num} x", "{num * 1}\") print(f\"{num} x {2:2} = {num * 2}\") print(f\"{num} x {3:2}", "tabuada: \"))) print('-'*12) print(f\"{num} x {1:2} = {num * 1}\") print(f\"{num} x {2:2}", "print(f\"{num} x {1:2} = {num * 1}\") print(f\"{num} x {2:2} = {num *", "print(f\"{num} x {5:2} = {num * 5}\") print(f\"{num} x {6:2} = {num *", "para a tabuada: \"))) print('-'*12) print(f\"{num} x {1:2} = {num * 1}\") print(f\"{num}", "{5:2} = {num * 5}\") print(f\"{num} x {6:2} = {num * 6}\") print(f\"{num}", "{num * 8}\") print(f\"{num} x {9:2} = {num * 9}\") print(f\"{num} x {10:2}", "número para a tabuada: \"))) print('-'*12) print(f\"{num} x {1:2} = {num * 1}\")", "x {5:2} = {num * 5}\") print(f\"{num} x {6:2} = {num * 6}\")", "print(\"Bem-Vindo a Tabuada v1.0!\") num = (int(input(\"Digite um número para a tabuada: \")))", "a tabuada: \"))) print('-'*12) print(f\"{num} x {1:2} = {num * 1}\") print(f\"{num} x", "1}\") print(f\"{num} x {2:2} = {num * 2}\") print(f\"{num} x {3:2} = {num", "v1.0!\") num = (int(input(\"Digite um número para a tabuada: \"))) print('-'*12) print(f\"{num} x", "= (int(input(\"Digite um número para a tabuada: \"))) print('-'*12) print(f\"{num} x {1:2} =", "{num * 6}\") print(f\"{num} x {7:2} = {num * 7}\") print(f\"{num} x {8:2}", "{num * 3}\") print(f\"{num} x {4:2} = {num * 4}\") print(f\"{num} x {5:2}", "print(f\"{num} x {7:2} = {num * 7}\") print(f\"{num} x {8:2} = {num *", "7}\") print(f\"{num} x {8:2} = {num * 8}\") print(f\"{num} x {9:2} = {num", "print(f\"{num} x {2:2} = {num * 2}\") print(f\"{num} x {3:2} = {num *", "{num * 5}\") print(f\"{num} x {6:2} = {num * 6}\") print(f\"{num} x {7:2}", "{1:2} = {num * 1}\") print(f\"{num} x {2:2} = 
{num * 2}\") print(f\"{num}", "* 8}\") print(f\"{num} x {9:2} = {num * 9}\") print(f\"{num} x {10:2} =", "(int(input(\"Digite um número para a tabuada: \"))) print('-'*12) print(f\"{num} x {1:2} = {num", "x {3:2} = {num * 3}\") print(f\"{num} x {4:2} = {num * 4}\")", "* 3}\") print(f\"{num} x {4:2} = {num * 4}\") print(f\"{num} x {5:2} =", "{num * 2}\") print(f\"{num} x {3:2} = {num * 3}\") print(f\"{num} x {4:2}", "{4:2} = {num * 4}\") print(f\"{num} x {5:2} = {num * 5}\") print(f\"{num}", "x {1:2} = {num * 1}\") print(f\"{num} x {2:2} = {num * 2}\")", "2}\") print(f\"{num} x {3:2} = {num * 3}\") print(f\"{num} x {4:2} = {num", "{7:2} = {num * 7}\") print(f\"{num} x {8:2} = {num * 8}\") print(f\"{num}", "{num * 4}\") print(f\"{num} x {5:2} = {num * 5}\") print(f\"{num} x {6:2}", "Tabuada v1.0!\") num = (int(input(\"Digite um número para a tabuada: \"))) print('-'*12) print(f\"{num}", "= {num * 2}\") print(f\"{num} x {3:2} = {num * 3}\") print(f\"{num} x", "= {num * 3}\") print(f\"{num} x {4:2} = {num * 4}\") print(f\"{num} x", "= {num * 8}\") print(f\"{num} x {9:2} = {num * 9}\") print(f\"{num} x", "a Tabuada v1.0!\") num = (int(input(\"Digite um número para a tabuada: \"))) print('-'*12)", "3}\") print(f\"{num} x {4:2} = {num * 4}\") print(f\"{num} x {5:2} = {num", "{8:2} = {num * 8}\") print(f\"{num} x {9:2} = {num * 9}\") print(f\"{num}", "x {8:2} = {num * 8}\") print(f\"{num} x {9:2} = {num * 9}\")", "6}\") print(f\"{num} x {7:2} = {num * 7}\") print(f\"{num} x {8:2} = {num", "x {9:2} = {num * 9}\") print(f\"{num} x {10:2} = {num * 10}\")", "5}\") print(f\"{num} x {6:2} = {num * 6}\") print(f\"{num} x {7:2} = {num" ]
[ "f def mysum(seq): match seq: case []: s = 0 case [head, *tail]:", "demo.py') print(fact(6)) print(mysum([1, 2, 3])) # Test out all the optimizations for code_str", "= ast.parse(code_str) expr = module.body[0].value print(ast.dump(expr)) opt = simplify(expr) print(' => optimized') print(opt)", "s = head + mysum(tail) return s # This one is superficially different", "than in the paper! # # Hm this depends on __match_args__ ? Is", "from ast import BinOp, UnaryOp, Constant, Add, Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg):", "optimized') print(opt) print(ast.dump(opt)) print('-----') if __name__ == '__main__': try: main(sys.argv) except RuntimeError as", "Run with Python 3.10 \"\"\" from __future__ import print_function import sys import ast", "5']: print(' %s' % code_str) module = ast.parse(code_str) expr = module.body[0].value print(ast.dump(expr)) opt", "mysum(tail) return s # This one is superficially different than in the paper!", "code_str) module = ast.parse(code_str) expr = module.body[0].value print(ast.dump(expr)) opt = simplify(expr) print(' =>", "out all the optimizations for code_str in ['3 + 4', '3 - 0',", "f = 1 case n: f = n * fact(n - 1) return", "__name__ == '__main__': try: main(sys.argv) except RuntimeError as e: print('FATAL: %s' % e,", "return s # This one is superficially different than in the paper! #", "1) return f def mysum(seq): match seq: case []: s = 0 case", "ast.parse(code_str) expr = module.body[0].value print(ast.dump(expr)) opt = simplify(expr) print(' => optimized') print(opt) print(ast.dump(opt))", "fact(arg): match arg: case 0 | 1: f = 1 case n: f", "Python 3.10 \"\"\" from __future__ import print_function import sys import ast from ast", "| Sub(), Constant(0)): return simplify(left) case UnaryOp(USub(), UnaryOp(USub(), item)): return simplify(item) case _:", "set in the ast module nodes? 
def simplify(node): match node: case BinOp(Constant(left), Add(),", "optimizations for code_str in ['3 + 4', '3 - 0', '- - 5']:", "sys import ast from ast import BinOp, UnaryOp, Constant, Add, Sub, USub #", "2, 3])) # Test out all the optimizations for code_str in ['3 +", "# https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match arg: case 0 | 1: f = 1", "it set in the ast module nodes? def simplify(node): match node: case BinOp(Constant(left),", "head + mysum(tail) return s # This one is superficially different than in", "- 5']: print(' %s' % code_str) module = ast.parse(code_str) expr = module.body[0].value print(ast.dump(expr))", "= 1 case n: f = n * fact(n - 1) return f", "<reponame>oilshell/blog-code #!/usr/bin/env python3 \"\"\" demo.py Run with Python 3.10 \"\"\" from __future__ import", "import sys import ast from ast import BinOp, UnaryOp, Constant, Add, Sub, USub", "*tail]: s = head + mysum(tail) return s # This one is superficially", "nodes? def simplify(node): match node: case BinOp(Constant(left), Add(), Constant(right)): return Constant(left + right)", "expr = module.body[0].value print(ast.dump(expr)) opt = simplify(expr) print(' => optimized') print(opt) print(ast.dump(opt)) print('-----')", "UnaryOp(USub(), UnaryOp(USub(), item)): return simplify(item) case _: return node def main(argv): print('Hello from", "Add(), Constant(right)): return Constant(left + right) case BinOp(left, Add() | Sub(), Constant(0)): return", "opt = simplify(expr) print(' => optimized') print(opt) print(ast.dump(opt)) print('-----') if __name__ == '__main__':", "UnaryOp(USub(), item)): return simplify(item) case _: return node def main(argv): print('Hello from demo.py')", "return node def main(argv): print('Hello from demo.py') print(fact(6)) print(mysum([1, 2, 3])) # Test", "the paper! # # Hm this depends on __match_args__ ? Is it set", "ast module nodes? 
def simplify(node): match node: case BinOp(Constant(left), Add(), Constant(right)): return Constant(left", "3.10 \"\"\" from __future__ import print_function import sys import ast from ast import", "mysum(seq): match seq: case []: s = 0 case [head, *tail]: s =", "print(' %s' % code_str) module = ast.parse(code_str) expr = module.body[0].value print(ast.dump(expr)) opt =", "case n: f = n * fact(n - 1) return f def mysum(seq):", "return f def mysum(seq): match seq: case []: s = 0 case [head,", "['3 + 4', '3 - 0', '- - 5']: print(' %s' % code_str)", "item)): return simplify(item) case _: return node def main(argv): print('Hello from demo.py') print(fact(6))", "case BinOp(left, Add() | Sub(), Constant(0)): return simplify(left) case UnaryOp(USub(), UnaryOp(USub(), item)): return", "0 case [head, *tail]: s = head + mysum(tail) return s # This", "return simplify(left) case UnaryOp(USub(), UnaryOp(USub(), item)): return simplify(item) case _: return node def", "from __future__ import print_function import sys import ast from ast import BinOp, UnaryOp,", "simplify(expr) print(' => optimized') print(opt) print(ast.dump(opt)) print('-----') if __name__ == '__main__': try: main(sys.argv)", "1: f = 1 case n: f = n * fact(n - 1)", "import BinOp, UnaryOp, Constant, Add, Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match arg:", "BinOp, UnaryOp, Constant, Add, Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match arg: case", "on __match_args__ ? Is it set in the ast module nodes? 
def simplify(node):", "3])) # Test out all the optimizations for code_str in ['3 + 4',", "1 case n: f = n * fact(n - 1) return f def", "# Test out all the optimizations for code_str in ['3 + 4', '3", "__future__ import print_function import sys import ast from ast import BinOp, UnaryOp, Constant,", "module.body[0].value print(ast.dump(expr)) opt = simplify(expr) print(' => optimized') print(opt) print(ast.dump(opt)) print('-----') if __name__", "print(' => optimized') print(opt) print(ast.dump(opt)) print('-----') if __name__ == '__main__': try: main(sys.argv) except", "0 | 1: f = 1 case n: f = n * fact(n", "paper! # # Hm this depends on __match_args__ ? Is it set in", "module = ast.parse(code_str) expr = module.body[0].value print(ast.dump(expr)) opt = simplify(expr) print(' => optimized')", "'__main__': try: main(sys.argv) except RuntimeError as e: print('FATAL: %s' % e, file=sys.stderr) sys.exit(1)", "def fact(arg): match arg: case 0 | 1: f = 1 case n:", "% code_str) module = ast.parse(code_str) expr = module.body[0].value print(ast.dump(expr)) opt = simplify(expr) print('", "arg: case 0 | 1: f = 1 case n: f = n", "# # Hm this depends on __match_args__ ? Is it set in the", "# Hm this depends on __match_args__ ? Is it set in the ast", "right) case BinOp(left, Add() | Sub(), Constant(0)): return simplify(left) case UnaryOp(USub(), UnaryOp(USub(), item)):", "return Constant(left + right) case BinOp(left, Add() | Sub(), Constant(0)): return simplify(left) case", "# This one is superficially different than in the paper! # # Hm", "n: f = n * fact(n - 1) return f def mysum(seq): match", "case _: return node def main(argv): print('Hello from demo.py') print(fact(6)) print(mysum([1, 2, 3]))", "case [head, *tail]: s = head + mysum(tail) return s # This one", "__match_args__ ? Is it set in the ast module nodes? 
def simplify(node): match", "s = 0 case [head, *tail]: s = head + mysum(tail) return s", "= head + mysum(tail) return s # This one is superficially different than", "4', '3 - 0', '- - 5']: print(' %s' % code_str) module =", "\"\"\" demo.py Run with Python 3.10 \"\"\" from __future__ import print_function import sys", "import ast from ast import BinOp, UnaryOp, Constant, Add, Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf", "the optimizations for code_str in ['3 + 4', '3 - 0', '- -", "== '__main__': try: main(sys.argv) except RuntimeError as e: print('FATAL: %s' % e, file=sys.stderr)", "Add, Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match arg: case 0 | 1:", "Constant(right)): return Constant(left + right) case BinOp(left, Add() | Sub(), Constant(0)): return simplify(left)", "BinOp(left, Add() | Sub(), Constant(0)): return simplify(left) case UnaryOp(USub(), UnaryOp(USub(), item)): return simplify(item)", "? Is it set in the ast module nodes? def simplify(node): match node:", "def main(argv): print('Hello from demo.py') print(fact(6)) print(mysum([1, 2, 3])) # Test out all", "print(mysum([1, 2, 3])) # Test out all the optimizations for code_str in ['3", "#!/usr/bin/env python3 \"\"\" demo.py Run with Python 3.10 \"\"\" from __future__ import print_function", "UnaryOp, Constant, Add, Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match arg: case 0", "different than in the paper! # # Hm this depends on __match_args__ ?", "in ['3 + 4', '3 - 0', '- - 5']: print(' %s' %", "* fact(n - 1) return f def mysum(seq): match seq: case []: s", "- 1) return f def mysum(seq): match seq: case []: s = 0", "+ mysum(tail) return s # This one is superficially different than in the", "fact(n - 1) return f def mysum(seq): match seq: case []: s =", "the ast module nodes? 
def simplify(node): match node: case BinOp(Constant(left), Add(), Constant(right)): return", "\"\"\" from __future__ import print_function import sys import ast from ast import BinOp,", "=> optimized') print(opt) print(ast.dump(opt)) print('-----') if __name__ == '__main__': try: main(sys.argv) except RuntimeError", "Constant(0)): return simplify(left) case UnaryOp(USub(), UnaryOp(USub(), item)): return simplify(item) case _: return node", "print(fact(6)) print(mysum([1, 2, 3])) # Test out all the optimizations for code_str in", "0', '- - 5']: print(' %s' % code_str) module = ast.parse(code_str) expr =", "for code_str in ['3 + 4', '3 - 0', '- - 5']: print('", "= 0 case [head, *tail]: s = head + mysum(tail) return s #", "seq: case []: s = 0 case [head, *tail]: s = head +", "in the paper! # # Hm this depends on __match_args__ ? Is it", "simplify(node): match node: case BinOp(Constant(left), Add(), Constant(right)): return Constant(left + right) case BinOp(left,", "case []: s = 0 case [head, *tail]: s = head + mysum(tail)", "superficially different than in the paper! # # Hm this depends on __match_args__", "Add() | Sub(), Constant(0)): return simplify(left) case UnaryOp(USub(), UnaryOp(USub(), item)): return simplify(item) case", "This one is superficially different than in the paper! # # Hm this", "Constant(left + right) case BinOp(left, Add() | Sub(), Constant(0)): return simplify(left) case UnaryOp(USub(),", "print_function import sys import ast from ast import BinOp, UnaryOp, Constant, Add, Sub,", "all the optimizations for code_str in ['3 + 4', '3 - 0', '-", "ast import BinOp, UnaryOp, Constant, Add, Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match", "depends on __match_args__ ? Is it set in the ast module nodes? 
def", "simplify(item) case _: return node def main(argv): print('Hello from demo.py') print(fact(6)) print(mysum([1, 2,", "case BinOp(Constant(left), Add(), Constant(right)): return Constant(left + right) case BinOp(left, Add() | Sub(),", "print('-----') if __name__ == '__main__': try: main(sys.argv) except RuntimeError as e: print('FATAL: %s'", "USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match arg: case 0 | 1: f =", "demo.py Run with Python 3.10 \"\"\" from __future__ import print_function import sys import", "module nodes? def simplify(node): match node: case BinOp(Constant(left), Add(), Constant(right)): return Constant(left +", "ast from ast import BinOp, UnaryOp, Constant, Add, Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def", "Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match arg: case 0 | 1: f", "import print_function import sys import ast from ast import BinOp, UnaryOp, Constant, Add,", "[]: s = 0 case [head, *tail]: s = head + mysum(tail) return", "print(opt) print(ast.dump(opt)) print('-----') if __name__ == '__main__': try: main(sys.argv) except RuntimeError as e:", "match seq: case []: s = 0 case [head, *tail]: s = head", "- 0', '- - 5']: print(' %s' % code_str) module = ast.parse(code_str) expr", "if __name__ == '__main__': try: main(sys.argv) except RuntimeError as e: print('FATAL: %s' %", "one is superficially different than in the paper! # # Hm this depends", "f = n * fact(n - 1) return f def mysum(seq): match seq:", "match arg: case 0 | 1: f = 1 case n: f =", "is superficially different than in the paper! # # Hm this depends on", "case UnaryOp(USub(), UnaryOp(USub(), item)): return simplify(item) case _: return node def main(argv): print('Hello", "with Python 3.10 \"\"\" from __future__ import print_function import sys import ast from", "Hm this depends on __match_args__ ? 
Is it set in the ast module", "= simplify(expr) print(' => optimized') print(opt) print(ast.dump(opt)) print('-----') if __name__ == '__main__': try:", "from demo.py') print(fact(6)) print(mysum([1, 2, 3])) # Test out all the optimizations for", "this depends on __match_args__ ? Is it set in the ast module nodes?", "code_str in ['3 + 4', '3 - 0', '- - 5']: print(' %s'", "BinOp(Constant(left), Add(), Constant(right)): return Constant(left + right) case BinOp(left, Add() | Sub(), Constant(0)):", "node def main(argv): print('Hello from demo.py') print(fact(6)) print(mysum([1, 2, 3])) # Test out", "= module.body[0].value print(ast.dump(expr)) opt = simplify(expr) print(' => optimized') print(opt) print(ast.dump(opt)) print('-----') if", "print(ast.dump(opt)) print('-----') if __name__ == '__main__': try: main(sys.argv) except RuntimeError as e: print('FATAL:", "python3 \"\"\" demo.py Run with Python 3.10 \"\"\" from __future__ import print_function import", "n * fact(n - 1) return f def mysum(seq): match seq: case []:", "+ right) case BinOp(left, Add() | Sub(), Constant(0)): return simplify(left) case UnaryOp(USub(), UnaryOp(USub(),", "[head, *tail]: s = head + mysum(tail) return s # This one is", "= n * fact(n - 1) return f def mysum(seq): match seq: case", "Is it set in the ast module nodes? 
def simplify(node): match node: case", "case 0 | 1: f = 1 case n: f = n *", "+ 4', '3 - 0', '- - 5']: print(' %s' % code_str) module", "print('Hello from demo.py') print(fact(6)) print(mysum([1, 2, 3])) # Test out all the optimizations", "def simplify(node): match node: case BinOp(Constant(left), Add(), Constant(right)): return Constant(left + right) case", "simplify(left) case UnaryOp(USub(), UnaryOp(USub(), item)): return simplify(item) case _: return node def main(argv):", "'3 - 0', '- - 5']: print(' %s' % code_str) module = ast.parse(code_str)", "| 1: f = 1 case n: f = n * fact(n -", "match node: case BinOp(Constant(left), Add(), Constant(right)): return Constant(left + right) case BinOp(left, Add()", "main(argv): print('Hello from demo.py') print(fact(6)) print(mysum([1, 2, 3])) # Test out all the", "Constant, Add, Sub, USub # https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match arg: case 0 |", "_: return node def main(argv): print('Hello from demo.py') print(fact(6)) print(mysum([1, 2, 3])) #", "'- - 5']: print(' %s' % code_str) module = ast.parse(code_str) expr = module.body[0].value", "def mysum(seq): match seq: case []: s = 0 case [head, *tail]: s", "Test out all the optimizations for code_str in ['3 + 4', '3 -", "node: case BinOp(Constant(left), Add(), Constant(right)): return Constant(left + right) case BinOp(left, Add() |", "%s' % code_str) module = ast.parse(code_str) expr = module.body[0].value print(ast.dump(expr)) opt = simplify(expr)", "print(ast.dump(expr)) opt = simplify(expr) print(' => optimized') print(opt) print(ast.dump(opt)) print('-----') if __name__ ==", "Sub(), Constant(0)): return simplify(left) case UnaryOp(USub(), UnaryOp(USub(), item)): return simplify(item) case _: return", "in the ast module nodes? 
def simplify(node): match node: case BinOp(Constant(left), Add(), Constant(right)):", "return simplify(item) case _: return node def main(argv): print('Hello from demo.py') print(fact(6)) print(mysum([1,", "s # This one is superficially different than in the paper! # #", "https://gvanrossum.github.io/docs/PyPatternMatching.pdf def fact(arg): match arg: case 0 | 1: f = 1 case" ]
[ "ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer = 1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name = None def __init__(self,", "pass return def setup_parameter(self, aPara_in= None): if aPara_in is not None: self.nParameter_subbasin =", "isinstance(obj, list): pass return JSONEncoder.default(self, obj) class pysubbasin(object): __metaclass__ = ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0", "shutil import copy2 import json from json import JSONEncoder from swaty.classes.swatpara import swatpara", "glob import shutil import numpy as np from pathlib import Path import tarfile", "<filename>swaty/classes/subbasin.py import os,stat import sys import glob import shutil import numpy as np", "is not None: pass else: pass return def setup_parameter(self, aPara_in= None): if aPara_in", "import swatpara class SubbasinClassEncoder(JSONEncoder): def default(self, obj): if isinstance(obj, np.integer): return int(obj) if", "class pysubbasin(object): __metaclass__ = ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer = 1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name", "self.__dict__.copy() for sKey in aSkip: obj.pop(sKey, None) sJson = json.dumps(obj,\\ sort_keys=True, \\ indent", "return float(obj) if isinstance(obj, np.ndarray): return obj.tolist() if isinstance(obj, swatpara): return json.loads(obj.tojson()) if", "import copy2 import json from json import JSONEncoder from swaty.classes.swatpara import swatpara class", "None def __init__(self, aConfig_in =None): if aConfig_in is not None: pass else: pass", "= 1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name = None def __init__(self, aConfig_in =None): if aConfig_in", "json from json import JSONEncoder from swaty.classes.swatpara import swatpara class SubbasinClassEncoder(JSONEncoder): def default(self,", "not None: self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for i in 
range(self.nParameter_subbasin): subbasin_dummy", "not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass return def tojson(self): aSkip = [] obj", "shutil import copyfile from abc import ABCMeta, abstractmethod import datetime from shutil import", "None): if aPara_in is not None: self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for", "import tarfile import subprocess from shutil import copyfile from abc import ABCMeta, abstractmethod", "pysubbasin(object): __metaclass__ = ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer = 1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name =", "self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for i in range(self.nParameter_subbasin): subbasin_dummy = aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy)", "lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer = 1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name = None def __init__(self, aConfig_in", "for i in range(self.nParameter_subbasin): subbasin_dummy = aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName =", "swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName if sName not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass", "iFlag_subbasin=0 nSoil_layer = 1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name = None def __init__(self, aConfig_in =None):", "if aPara_in is not None: self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for i", "= swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName if sName not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else:", "def default(self, 
obj): if isinstance(obj, np.integer): return int(obj) if isinstance(obj, np.float32): return float(obj)", "pass return def tojson(self): aSkip = [] obj = self.__dict__.copy() for sKey in", "=None): if aConfig_in is not None: pass else: pass return def setup_parameter(self, aPara_in=", "not None: pass else: pass return def setup_parameter(self, aPara_in= None): if aPara_in is", "if aConfig_in is not None: pass else: pass return def setup_parameter(self, aPara_in= None):", "in range(self.nParameter_subbasin): subbasin_dummy = aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName if", "pParameter_subbasin.sName if sName not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass return def tojson(self): aSkip", "class SubbasinClassEncoder(JSONEncoder): def default(self, obj): if isinstance(obj, np.integer): return int(obj) if isinstance(obj, np.float32):", "= ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer = 1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name = None def", "self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for i in range(self.nParameter_subbasin): subbasin_dummy = aPara_in[i]", "np.float32): return float(obj) if isinstance(obj, np.ndarray): return obj.tolist() if isinstance(obj, swatpara): return json.loads(obj.tojson())", "if isinstance(obj, swatpara): return json.loads(obj.tojson()) if isinstance(obj, list): pass return JSONEncoder.default(self, obj) class", "swaty.classes.swatpara import swatpara class SubbasinClassEncoder(JSONEncoder): def default(self, obj): if isinstance(obj, np.integer): return int(obj)", "sKey in aSkip: obj.pop(sKey, None) sJson = json.dumps(obj,\\ sort_keys=True, \\ indent = 4,", "aPara_in= None): if aPara_in is not None: self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list() 
self.aParameter_subbasin_name =list()", "return obj.tolist() if isinstance(obj, swatpara): return json.loads(obj.tojson()) if isinstance(obj, list): pass return JSONEncoder.default(self,", "default(self, obj): if isinstance(obj, np.integer): return int(obj) if isinstance(obj, np.float32): return float(obj) if", "as np from pathlib import Path import tarfile import subprocess from shutil import", "tarfile import subprocess from shutil import copyfile from abc import ABCMeta, abstractmethod import", "np.integer): return int(obj) if isinstance(obj, np.float32): return float(obj) if isinstance(obj, np.ndarray): return obj.tolist()", "if isinstance(obj, np.ndarray): return obj.tolist() if isinstance(obj, swatpara): return json.loads(obj.tojson()) if isinstance(obj, list):", "obj.tolist() if isinstance(obj, swatpara): return json.loads(obj.tojson()) if isinstance(obj, list): pass return JSONEncoder.default(self, obj)", "= aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName if sName not in", "Path import tarfile import subprocess from shutil import copyfile from abc import ABCMeta,", "sJson = json.dumps(obj,\\ sort_keys=True, \\ indent = 4, \\ ensure_ascii=True, \\ cls=SubbasinClassEncoder) return", "abc import ABCMeta, abstractmethod import datetime from shutil import copy2 import json from", "aConfig_in is not None: pass else: pass return def setup_parameter(self, aPara_in= None): if", "for sKey in aSkip: obj.pop(sKey, None) sJson = json.dumps(obj,\\ sort_keys=True, \\ indent =", "json import JSONEncoder from swaty.classes.swatpara import swatpara class SubbasinClassEncoder(JSONEncoder): def default(self, obj): if", "range(self.nParameter_subbasin): subbasin_dummy = aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName if sName", "=list() for i in range(self.nParameter_subbasin): 
subbasin_dummy = aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName", "JSONEncoder from swaty.classes.swatpara import swatpara class SubbasinClassEncoder(JSONEncoder): def default(self, obj): if isinstance(obj, np.integer):", "self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName if sName not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass return", "copyfile from abc import ABCMeta, abstractmethod import datetime from shutil import copy2 import", "import datetime from shutil import copy2 import json from json import JSONEncoder from", "import os,stat import sys import glob import shutil import numpy as np from", "= len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for i in range(self.nParameter_subbasin): subbasin_dummy = aPara_in[i] pParameter_subbasin", "None) sJson = json.dumps(obj,\\ sort_keys=True, \\ indent = 4, \\ ensure_ascii=True, \\ cls=SubbasinClassEncoder)", "sName not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass return def tojson(self): aSkip = []", "import json from json import JSONEncoder from swaty.classes.swatpara import swatpara class SubbasinClassEncoder(JSONEncoder): def", "list): pass return JSONEncoder.default(self, obj) class pysubbasin(object): __metaclass__ = ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer", "[] obj = self.__dict__.copy() for sKey in aSkip: obj.pop(sKey, None) sJson = json.dumps(obj,\\", "json.loads(obj.tojson()) if isinstance(obj, list): pass return JSONEncoder.default(self, obj) class pysubbasin(object): __metaclass__ = ABCMeta", "from abc import ABCMeta, abstractmethod import datetime from shutil import copy2 import json", "def setup_parameter(self, aPara_in= None): if aPara_in is not None: self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list()", "else: pass return def 
tojson(self): aSkip = [] obj = self.__dict__.copy() for sKey", "import numpy as np from pathlib import Path import tarfile import subprocess from", "os,stat import sys import glob import shutil import numpy as np from pathlib", "import copyfile from abc import ABCMeta, abstractmethod import datetime from shutil import copy2", "= [] obj = self.__dict__.copy() for sKey in aSkip: obj.pop(sKey, None) sJson =", "else: pass return def setup_parameter(self, aPara_in= None): if aPara_in is not None: self.nParameter_subbasin", "None: self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for i in range(self.nParameter_subbasin): subbasin_dummy =", "__metaclass__ = ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer = 1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name = None", "in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass return def tojson(self): aSkip = [] obj =", "aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName if sName not in self.aParameter_subbasin_name:", "return def tojson(self): aSkip = [] obj = self.__dict__.copy() for sKey in aSkip:", "= pParameter_subbasin.sName if sName not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass return def tojson(self):", "aParameter_subbasin_name = None def __init__(self, aConfig_in =None): if aConfig_in is not None: pass", "from shutil import copyfile from abc import ABCMeta, abstractmethod import datetime from shutil", "import glob import shutil import numpy as np from pathlib import Path import", "pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName if sName not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName)", "= json.dumps(obj,\\ sort_keys=True, \\ indent = 4, 
\\ ensure_ascii=True, \\ cls=SubbasinClassEncoder) return sJson", "int(obj) if isinstance(obj, np.float32): return float(obj) if isinstance(obj, np.ndarray): return obj.tolist() if isinstance(obj,", "setup_parameter(self, aPara_in= None): if aPara_in is not None: self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name", "self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass return def tojson(self): aSkip = [] obj = self.__dict__.copy()", "pass else: pass return def setup_parameter(self, aPara_in= None): if aPara_in is not None:", "def __init__(self, aConfig_in =None): if aConfig_in is not None: pass else: pass return", "nSoil_layer = 1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name = None def __init__(self, aConfig_in =None): if", "subprocess from shutil import copyfile from abc import ABCMeta, abstractmethod import datetime from", "obj.pop(sKey, None) sJson = json.dumps(obj,\\ sort_keys=True, \\ indent = 4, \\ ensure_ascii=True, \\", "sys import glob import shutil import numpy as np from pathlib import Path", "swatpara): return json.loads(obj.tojson()) if isinstance(obj, list): pass return JSONEncoder.default(self, obj) class pysubbasin(object): __metaclass__", "pass return JSONEncoder.default(self, obj) class pysubbasin(object): __metaclass__ = ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer =", "shutil import numpy as np from pathlib import Path import tarfile import subprocess", "in aSkip: obj.pop(sKey, None) sJson = json.dumps(obj,\\ sort_keys=True, \\ indent = 4, \\", "from shutil import copy2 import json from json import JSONEncoder from swaty.classes.swatpara import", "return JSONEncoder.default(self, obj) class pysubbasin(object): __metaclass__ = ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer = 1", "isinstance(obj, np.integer): return int(obj) if isinstance(obj, np.float32): return float(obj) if isinstance(obj, np.ndarray): return", 
"__init__(self, aConfig_in =None): if aConfig_in is not None: pass else: pass return def", "copy2 import json from json import JSONEncoder from swaty.classes.swatpara import swatpara class SubbasinClassEncoder(JSONEncoder):", "obj) class pysubbasin(object): __metaclass__ = ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer = 1 nParameter_subbasin=0 aParameter_subbasin=None", "= self.__dict__.copy() for sKey in aSkip: obj.pop(sKey, None) sJson = json.dumps(obj,\\ sort_keys=True, \\", "isinstance(obj, np.float32): return float(obj) if isinstance(obj, np.ndarray): return obj.tolist() if isinstance(obj, swatpara): return", "is not None: self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for i in range(self.nParameter_subbasin):", "import shutil import numpy as np from pathlib import Path import tarfile import", "return int(obj) if isinstance(obj, np.float32): return float(obj) if isinstance(obj, np.ndarray): return obj.tolist() if", "self.aParameter_subbasin_name.append(sName) else: pass return def tojson(self): aSkip = [] obj = self.__dict__.copy() for", "1 nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name = None def __init__(self, aConfig_in =None): if aConfig_in is", "isinstance(obj, np.ndarray): return obj.tolist() if isinstance(obj, swatpara): return json.loads(obj.tojson()) if isinstance(obj, list): pass", "import ABCMeta, abstractmethod import datetime from shutil import copy2 import json from json", "return def setup_parameter(self, aPara_in= None): if aPara_in is not None: self.nParameter_subbasin = len(aPara_in)", "ABCMeta, abstractmethod import datetime from shutil import copy2 import json from json import", "aPara_in is not None: self.nParameter_subbasin = len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for i in", "datetime from shutil import copy2 import json from json import JSONEncoder from swaty.classes.swatpara", "None: pass else: 
pass return def setup_parameter(self, aPara_in= None): if aPara_in is not", "subbasin_dummy = aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName if sName not", "import sys import glob import shutil import numpy as np from pathlib import", "import Path import tarfile import subprocess from shutil import copyfile from abc import", "from json import JSONEncoder from swaty.classes.swatpara import swatpara class SubbasinClassEncoder(JSONEncoder): def default(self, obj):", "numpy as np from pathlib import Path import tarfile import subprocess from shutil", "np.ndarray): return obj.tolist() if isinstance(obj, swatpara): return json.loads(obj.tojson()) if isinstance(obj, list): pass return", "if sName not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass return def tojson(self): aSkip =", "abstractmethod import datetime from shutil import copy2 import json from json import JSONEncoder", "self.aParameter_subbasin_name =list() for i in range(self.nParameter_subbasin): subbasin_dummy = aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin)", "JSONEncoder.default(self, obj) class pysubbasin(object): __metaclass__ = ABCMeta lIndex_subbasin=-1 iFlag_subbasin=0 nSoil_layer = 1 nParameter_subbasin=0", "nParameter_subbasin=0 aParameter_subbasin=None aParameter_subbasin_name = None def __init__(self, aConfig_in =None): if aConfig_in is not", "from swaty.classes.swatpara import swatpara class SubbasinClassEncoder(JSONEncoder): def default(self, obj): if isinstance(obj, np.integer): return", "aParameter_subbasin=None aParameter_subbasin_name = None def __init__(self, aConfig_in =None): if aConfig_in is not None:", "if isinstance(obj, list): pass return JSONEncoder.default(self, obj) class pysubbasin(object): __metaclass__ = ABCMeta lIndex_subbasin=-1", "aConfig_in =None): if aConfig_in is not 
None: pass else: pass return def setup_parameter(self,", "from pathlib import Path import tarfile import subprocess from shutil import copyfile from", "len(aPara_in) self.aParameter_subbasin=list() self.aParameter_subbasin_name =list() for i in range(self.nParameter_subbasin): subbasin_dummy = aPara_in[i] pParameter_subbasin =", "if isinstance(obj, np.float32): return float(obj) if isinstance(obj, np.ndarray): return obj.tolist() if isinstance(obj, swatpara):", "pathlib import Path import tarfile import subprocess from shutil import copyfile from abc", "aSkip = [] obj = self.__dict__.copy() for sKey in aSkip: obj.pop(sKey, None) sJson", "swatpara class SubbasinClassEncoder(JSONEncoder): def default(self, obj): if isinstance(obj, np.integer): return int(obj) if isinstance(obj,", "import JSONEncoder from swaty.classes.swatpara import swatpara class SubbasinClassEncoder(JSONEncoder): def default(self, obj): if isinstance(obj,", "aSkip: obj.pop(sKey, None) sJson = json.dumps(obj,\\ sort_keys=True, \\ indent = 4, \\ ensure_ascii=True,", "i in range(self.nParameter_subbasin): subbasin_dummy = aPara_in[i] pParameter_subbasin = swatpara(subbasin_dummy) self.aParameter_subbasin.append(pParameter_subbasin) sName = pParameter_subbasin.sName", "def tojson(self): aSkip = [] obj = self.__dict__.copy() for sKey in aSkip: obj.pop(sKey,", "np from pathlib import Path import tarfile import subprocess from shutil import copyfile", "if isinstance(obj, np.integer): return int(obj) if isinstance(obj, np.float32): return float(obj) if isinstance(obj, np.ndarray):", "isinstance(obj, swatpara): return json.loads(obj.tojson()) if isinstance(obj, list): pass return JSONEncoder.default(self, obj) class pysubbasin(object):", "import subprocess from shutil import copyfile from abc import ABCMeta, abstractmethod import datetime", "return json.loads(obj.tojson()) if isinstance(obj, list): pass return JSONEncoder.default(self, obj) class pysubbasin(object): __metaclass__ =", "= None def 
__init__(self, aConfig_in =None): if aConfig_in is not None: pass else:", "sName = pParameter_subbasin.sName if sName not in self.aParameter_subbasin_name: self.aParameter_subbasin_name.append(sName) else: pass return def", "float(obj) if isinstance(obj, np.ndarray): return obj.tolist() if isinstance(obj, swatpara): return json.loads(obj.tojson()) if isinstance(obj,", "SubbasinClassEncoder(JSONEncoder): def default(self, obj): if isinstance(obj, np.integer): return int(obj) if isinstance(obj, np.float32): return", "obj = self.__dict__.copy() for sKey in aSkip: obj.pop(sKey, None) sJson = json.dumps(obj,\\ sort_keys=True,", "tojson(self): aSkip = [] obj = self.__dict__.copy() for sKey in aSkip: obj.pop(sKey, None)", "obj): if isinstance(obj, np.integer): return int(obj) if isinstance(obj, np.float32): return float(obj) if isinstance(obj," ]
[ "permutation of a palindrome def palindrome_permutation(str1): str1 = str1.replace(\" \", \"\") count =", "if it is a permutation of a palindrome def palindrome_permutation(str1): str1 = str1.replace(\"", "function to check if it is a permutation of a palindrome def palindrome_permutation(str1):", "= {} for char in str1: if count.get(char): count[char] += 1 else: count[char]", "count = {} for char in str1: if count.get(char): count[char] += 1 else:", "write a function to check if it is a permutation of a palindrome", "!= 0: odds += 1 if odds > 1: return False return True", "print(palindrome_permutation(\"tact coa\")) # True, taco cat print(palindrome_permutation(\"cacr ear\")) # True, race car print(palindrome_permutation(\"livo", "it is a permutation of a palindrome def palindrome_permutation(str1): str1 = str1.replace(\" \",", "in str1: if count.get(char): count[char] += 1 else: count[char] = 1 odds =", "str1: if count.get(char): count[char] += 1 else: count[char] = 1 odds = 0", "char, num in count.items(): if num % 2 != 0: odds += 1", "# True, race car print(palindrome_permutation(\"livo veile\")) # True, evil olive print(palindrome_permutation(\"not one\")) #", "ear\")) # True, race car print(palindrome_permutation(\"livo veile\")) # True, evil olive print(palindrome_permutation(\"not one\"))", "# True, taco cat print(palindrome_permutation(\"cacr ear\")) # True, race car print(palindrome_permutation(\"livo veile\")) #", "def palindrome_permutation(str1): str1 = str1.replace(\" \", \"\") count = {} for char in", "\"\") count = {} for char in str1: if count.get(char): count[char] += 1", "taco cat print(palindrome_permutation(\"cacr ear\")) # True, race car print(palindrome_permutation(\"livo veile\")) # True, evil", "a function to check if it is a permutation of a palindrome def", "> 1: return False return True print(palindrome_permutation(\"tact coa\")) # True, taco cat print(palindrome_permutation(\"cacr", "1: return False return True 
print(palindrome_permutation(\"tact coa\")) # True, taco cat print(palindrome_permutation(\"cacr ear\"))", "= 0 for char, num in count.items(): if num % 2 != 0:", "palindrome def palindrome_permutation(str1): str1 = str1.replace(\" \", \"\") count = {} for char", "print(palindrome_permutation(\"cacr ear\")) # True, race car print(palindrome_permutation(\"livo veile\")) # True, evil olive print(palindrome_permutation(\"not", "+= 1 else: count[char] = 1 odds = 0 for char, num in", "False return True print(palindrome_permutation(\"tact coa\")) # True, taco cat print(palindrome_permutation(\"cacr ear\")) # True,", "1 odds = 0 for char, num in count.items(): if num % 2", "cat print(palindrome_permutation(\"cacr ear\")) # True, race car print(palindrome_permutation(\"livo veile\")) # True, evil olive", "if odds > 1: return False return True print(palindrome_permutation(\"tact coa\")) # True, taco", "return False return True print(palindrome_permutation(\"tact coa\")) # True, taco cat print(palindrome_permutation(\"cacr ear\")) #", "\", \"\") count = {} for char in str1: if count.get(char): count[char] +=", "str1.replace(\" \", \"\") count = {} for char in str1: if count.get(char): count[char]", "odds > 1: return False return True print(palindrome_permutation(\"tact coa\")) # True, taco cat", "check if it is a permutation of a palindrome def palindrome_permutation(str1): str1 =", "= str1.replace(\" \", \"\") count = {} for char in str1: if count.get(char):", "count[char] = 1 odds = 0 for char, num in count.items(): if num", "of a palindrome def palindrome_permutation(str1): str1 = str1.replace(\" \", \"\") count = {}", "str1 = str1.replace(\" \", \"\") count = {} for char in str1: if", "in count.items(): if num % 2 != 0: odds += 1 if odds", "count.items(): if num % 2 != 0: odds += 1 if odds >", "a palindrome def palindrome_permutation(str1): str1 = str1.replace(\" \", \"\") count = {} for", "Given a string, write a function to check if it is a permutation", "for char in 
str1: if count.get(char): count[char] += 1 else: count[char] = 1", "<filename>Ch1-Arrays-and-Strings/04_palindrome_permutation.py<gh_stars>0 # Given a string, write a function to check if it is", "% 2 != 0: odds += 1 if odds > 1: return False", "for char, num in count.items(): if num % 2 != 0: odds +=", "+= 1 if odds > 1: return False return True print(palindrome_permutation(\"tact coa\")) #", "else: count[char] = 1 odds = 0 for char, num in count.items(): if", "= 1 odds = 0 for char, num in count.items(): if num %", "odds += 1 if odds > 1: return False return True print(palindrome_permutation(\"tact coa\"))", "odds = 0 for char, num in count.items(): if num % 2 !=", "num % 2 != 0: odds += 1 if odds > 1: return", "if num % 2 != 0: odds += 1 if odds > 1:", "{} for char in str1: if count.get(char): count[char] += 1 else: count[char] =", "is a permutation of a palindrome def palindrome_permutation(str1): str1 = str1.replace(\" \", \"\")", "count[char] += 1 else: count[char] = 1 odds = 0 for char, num", "if count.get(char): count[char] += 1 else: count[char] = 1 odds = 0 for", "coa\")) # True, taco cat print(palindrome_permutation(\"cacr ear\")) # True, race car print(palindrome_permutation(\"livo veile\"))", "return True print(palindrome_permutation(\"tact coa\")) # True, taco cat print(palindrome_permutation(\"cacr ear\")) # True, race", "True, race car print(palindrome_permutation(\"livo veile\")) # True, evil olive print(palindrome_permutation(\"not one\")) # False", "1 else: count[char] = 1 odds = 0 for char, num in count.items():", "0: odds += 1 if odds > 1: return False return True print(palindrome_permutation(\"tact", "True, taco cat print(palindrome_permutation(\"cacr ear\")) # True, race car print(palindrome_permutation(\"livo veile\")) # True,", "a permutation of a palindrome def palindrome_permutation(str1): str1 = str1.replace(\" \", \"\") count", "count.get(char): count[char] += 1 else: count[char] = 1 odds = 0 for char,", "string, write a function to 
check if it is a permutation of a", "0 for char, num in count.items(): if num % 2 != 0: odds", "palindrome_permutation(str1): str1 = str1.replace(\" \", \"\") count = {} for char in str1:", "1 if odds > 1: return False return True print(palindrome_permutation(\"tact coa\")) # True,", "char in str1: if count.get(char): count[char] += 1 else: count[char] = 1 odds", "# Given a string, write a function to check if it is a", "to check if it is a permutation of a palindrome def palindrome_permutation(str1): str1", "num in count.items(): if num % 2 != 0: odds += 1 if", "2 != 0: odds += 1 if odds > 1: return False return", "True print(palindrome_permutation(\"tact coa\")) # True, taco cat print(palindrome_permutation(\"cacr ear\")) # True, race car", "a string, write a function to check if it is a permutation of" ]
[ "\"\"\" This is our most basic model. This presupposes you know how to", "Enter the main loop finally: stdscr.erase() stdscr.refresh() stdscr.keypad(0) curses.echo() ; curses.nocbreak() curses.endwin() #", "stdscr=curses.initscr() curses.noecho() ; curses.cbreak() stdscr.keypad(1) main(stdscr) # Enter the main loop finally: stdscr.erase()", "For now, put it in your 'to learn' notes. You can come back", "113: keypress = stdscr.getch() print keypress if __name__=='__main__': \"\"\" This is our most", "identical to example 1. Moving on!! \"\"\" try: stdscr=curses.initscr() curses.noecho() ; curses.cbreak() stdscr.keypad(1)", "1. Moving on!! \"\"\" try: stdscr=curses.initscr() curses.noecho() ; curses.cbreak() stdscr.keypad(1) main(stdscr) # Enter", "try... finally... block. For now, put it in your 'to learn' notes. You", "\"\"\" Curses is controlled from here. This might be called 'the loop' in", "block. For now, put it in your 'to learn' notes. You can come", "components initialized by curses.wrapper. This code is pretty identical to example 1. Moving", "might be called 'the loop' in a game. game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10)", "import curses def main(stdscr): \"\"\" Curses is controlled from here. This might be", "You can come back to this to see the components initialized by curses.wrapper.", "; curses.cbreak() stdscr.keypad(1) main(stdscr) # Enter the main loop finally: stdscr.erase() stdscr.refresh() stdscr.keypad(0)", "Curses is controlled from here. This might be called 'the loop' in a", "# Enter the main loop finally: stdscr.erase() stdscr.refresh() stdscr.keypad(0) curses.echo() ; curses.nocbreak() curses.endwin()", "!= 113: keypress = stdscr.getch() print keypress if __name__=='__main__': \"\"\" This is our", "'the loop' in a game. 
game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int()", "try: stdscr=curses.initscr() curses.noecho() ; curses.cbreak() stdscr.keypad(1) main(stdscr) # Enter the main loop finally:", "it in your 'to learn' notes. You can come back to this to", "code is pretty identical to example 1. Moving on!! \"\"\" try: stdscr=curses.initscr() curses.noecho()", "loop' in a game. game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int() #", "initialized by curses.wrapper. This code is pretty identical to example 1. Moving on!!", "113 is the lowercase 'q' key. while keypress != 113: keypress = stdscr.getch()", "to read a try... finally... block. For now, put it in your 'to", "lowercase 'q' key. while keypress != 113: keypress = stdscr.getch() print keypress if", "presupposes you know how to read a try... finally... block. For now, put", "is controlled from here. This might be called 'the loop' in a game.", "basic model. This presupposes you know how to read a try... finally... block.", "pretty identical to example 1. Moving on!! \"\"\" try: stdscr=curses.initscr() curses.noecho() ; curses.cbreak()", "is our most basic model. This presupposes you know how to read a", "be called 'the loop' in a game. game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress", "if __name__=='__main__': \"\"\" This is our most basic model. This presupposes you know", "called 'the loop' in a game. game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress =", "learn' notes. You can come back to this to see the components initialized", "stdscr.getch() print keypress if __name__=='__main__': \"\"\" This is our most basic model. This", "example 1. Moving on!! 
\"\"\" try: stdscr=curses.initscr() curses.noecho() ; curses.cbreak() stdscr.keypad(1) main(stdscr) #", "finally... block. For now, put it in your 'to learn' notes. You can", "This code is pretty identical to example 1. Moving on!! \"\"\" try: stdscr=curses.initscr()", "main(stdscr) # Enter the main loop finally: stdscr.erase() stdscr.refresh() stdscr.keypad(0) curses.echo() ; curses.nocbreak()", "stdscr.keypad(1) main(stdscr) # Enter the main loop finally: stdscr.erase() stdscr.refresh() stdscr.keypad(0) curses.echo() ;", "curses.wrapper. This code is pretty identical to example 1. Moving on!! \"\"\" try:", "on!! \"\"\" try: stdscr=curses.initscr() curses.noecho() ; curses.cbreak() stdscr.keypad(1) main(stdscr) # Enter the main", "curses.noecho() ; curses.cbreak() stdscr.keypad(1) main(stdscr) # Enter the main loop finally: stdscr.erase() stdscr.refresh()", "curses def main(stdscr): \"\"\" Curses is controlled from here. This might be called", "# 113 is the lowercase 'q' key. while keypress != 113: keypress =", "keypress != 113: keypress = stdscr.getch() print keypress if __name__=='__main__': \"\"\" This is", "main(stdscr): \"\"\" Curses is controlled from here. This might be called 'the loop'", "This presupposes you know how to read a try... finally... block. For now,", "is the lowercase 'q' key. while keypress != 113: keypress = stdscr.getch() print", "int() # 113 is the lowercase 'q' key. while keypress != 113: keypress", "notes. You can come back to this to see the components initialized by", "in your 'to learn' notes. You can come back to this to see", "curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int() # 113 is the lowercase 'q' key. while keypress", "This is our most basic model. This presupposes you know how to read", "come back to this to see the components initialized by curses.wrapper. This code", "here. This might be called 'the loop' in a game. game loop: http://gameprogrammingpatterns.com/game-loop.html", "a try... finally... block. 
For now, put it in your 'to learn' notes.", "most basic model. This presupposes you know how to read a try... finally...", "This might be called 'the loop' in a game. game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\"", "\"\"\" try: stdscr=curses.initscr() curses.noecho() ; curses.cbreak() stdscr.keypad(1) main(stdscr) # Enter the main loop", "from here. This might be called 'the loop' in a game. game loop:", "game. game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int() # 113 is the", "how to read a try... finally... block. For now, put it in your", "__name__=='__main__': \"\"\" This is our most basic model. This presupposes you know how", "now, put it in your 'to learn' notes. You can come back to", "your 'to learn' notes. You can come back to this to see the", "in a game. game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int() # 113", "game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int() # 113 is the lowercase", "to this to see the components initialized by curses.wrapper. This code is pretty", "the main loop finally: stdscr.erase() stdscr.refresh() stdscr.keypad(0) curses.echo() ; curses.nocbreak() curses.endwin() # Terminate", "= stdscr.getch() print keypress if __name__=='__main__': \"\"\" This is our most basic model.", "keypress = stdscr.getch() print keypress if __name__=='__main__': \"\"\" This is our most basic", "http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int() # 113 is the lowercase 'q' key.", "this to see the components initialized by curses.wrapper. This code is pretty identical", "know how to read a try... finally... block. For now, put it in", "to see the components initialized by curses.wrapper. 
This code is pretty identical to", "keypress if __name__=='__main__': \"\"\" This is our most basic model. This presupposes you", "while keypress != 113: keypress = stdscr.getch() print keypress if __name__=='__main__': \"\"\" This", "key. while keypress != 113: keypress = stdscr.getch() print keypress if __name__=='__main__': \"\"\"", "to example 1. Moving on!! \"\"\" try: stdscr=curses.initscr() curses.noecho() ; curses.cbreak() stdscr.keypad(1) main(stdscr)", "'q' key. while keypress != 113: keypress = stdscr.getch() print keypress if __name__=='__main__':", "model. This presupposes you know how to read a try... finally... block. For", "the lowercase 'q' key. while keypress != 113: keypress = stdscr.getch() print keypress", "can come back to this to see the components initialized by curses.wrapper. This", "by curses.wrapper. This code is pretty identical to example 1. Moving on!! \"\"\"", "= int() # 113 is the lowercase 'q' key. while keypress != 113:", "put it in your 'to learn' notes. You can come back to this", "controlled from here. This might be called 'the loop' in a game. game", "curses.cbreak() stdscr.keypad(1) main(stdscr) # Enter the main loop finally: stdscr.erase() stdscr.refresh() stdscr.keypad(0) curses.echo()", "back to this to see the components initialized by curses.wrapper. This code is", "read a try... finally... block. For now, put it in your 'to learn'", "is pretty identical to example 1. Moving on!! \"\"\" try: stdscr=curses.initscr() curses.noecho() ;", "def main(stdscr): \"\"\" Curses is controlled from here. This might be called 'the", "loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int() # 113 is the lowercase 'q'", "'to learn' notes. You can come back to this to see the components", "Moving on!! \"\"\" try: stdscr=curses.initscr() curses.noecho() ; curses.cbreak() stdscr.keypad(1) main(stdscr) # Enter the", "the components initialized by curses.wrapper. 
This code is pretty identical to example 1.", "\"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int() # 113 is the lowercase 'q' key. while", "keypress = int() # 113 is the lowercase 'q' key. while keypress !=", "print keypress if __name__=='__main__': \"\"\" This is our most basic model. This presupposes", "main loop finally: stdscr.erase() stdscr.refresh() stdscr.keypad(0) curses.echo() ; curses.nocbreak() curses.endwin() # Terminate curses", "see the components initialized by curses.wrapper. This code is pretty identical to example", "a game. game loop: http://gameprogrammingpatterns.com/game-loop.html \"\"\" curses.textpad.rectangle(stdscr,0,0,10,10) keypress = int() # 113 is", "you know how to read a try... finally... block. For now, put it", "our most basic model. This presupposes you know how to read a try..." ]
[ "namecomp from ._treecompare import treedups from ._treecompare import treepurge from ._treecompare import duplicate", "from ._treecompare import namecomp from ._treecompare import treedups from ._treecompare import treepurge from", "API from ._treecompare import namecomp from ._treecompare import treedups from ._treecompare import treepurge", "import namecomp from ._treecompare import treedups from ._treecompare import treepurge from ._treecompare import", "._treecompare import namecomp from ._treecompare import treedups from ._treecompare import treepurge from ._treecompare", "# API from ._treecompare import namecomp from ._treecompare import treedups from ._treecompare import" ]
[ "= json.load(_schema_open(filename)) else: tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx = {'schema': load_schema(key)} if extra_ctx: ctx.update(extra_ctx)", "ctx = {} if _schema_exists(filename): ctx = yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] = ctx return", "tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx = {'schema': load_schema(key)} if extra_ctx: ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx)", "def load_schema(key): filename = '{}.yaml.tmpl'.format(key) if filename not in cache: ctx = load_context(key)", "if filename not in cache: ctx = load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return", "'schemas/{}'.format(filename)) def load_context(key): filename = '{}.ctx.yaml'.format(key) if filename not in cache: ctx =", "import os env = Environment( loader=PackageLoader('kite_metrics', 'schemas'), ) cache = {} def _schema_exists(filename):", "pkg_resources import os env = Environment( loader=PackageLoader('kite_metrics', 'schemas'), ) cache = {} def", "= yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] = ctx return cache[filename] def load_schema(key): filename = '{}.yaml.tmpl'.format(key)", "{'schema': load_schema(key)} if extra_ctx: ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx) try: cache[filename] = json.loads(rendered) except", "return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename): return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key): filename = '{}.ctx.yaml'.format(key)", "return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key): filename = '{}.ctx.yaml'.format(key) if filename not in cache:", "= load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return 
cache[filename] def load_json_schema(key, extra_ctx=None): filename =", "{} def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename): return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key):", "not in cache: ctx = load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return cache[filename] def", "cache: ctx = {} if _schema_exists(filename): ctx = yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] = ctx", "cache: ctx = load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return cache[filename] def load_json_schema(key, extra_ctx=None):", "ctx = {'schema': load_schema(key)} if extra_ctx: ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx) try: cache[filename] =", "cache: if _schema_exists(filename): cache[filename] = json.load(_schema_open(filename)) else: tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx = {'schema':", "not in cache: if _schema_exists(filename): cache[filename] = json.load(_schema_open(filename)) else: tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx", "def load_context(key): filename = '{}.ctx.yaml'.format(key) if filename not in cache: ctx = {}", "json import pkg_resources import os env = Environment( loader=PackageLoader('kite_metrics', 'schemas'), ) cache =", "'{}.schema.json.tmpl'.format(key) ctx = {'schema': load_schema(key)} if extra_ctx: ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx) try: cache[filename]", "return cache[filename] def load_schema(key): filename = '{}.yaml.tmpl'.format(key) if filename not in cache: ctx", "filename = '{}.yaml.tmpl'.format(key) if filename not in cache: ctx = load_context(key) cache[filename] =", "cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) 
return cache[filename] def load_json_schema(key, extra_ctx=None): filename = '{}.schema.json'.format(key) if", "Loader=yaml.FullLoader) return cache[filename] def load_json_schema(key, extra_ctx=None): filename = '{}.schema.json'.format(key) if filename not in", "ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx) try: cache[filename] = json.loads(rendered) except json.decoder.JSONDecodeError: print(\"Error decoding schema", "load_context(key): filename = '{}.ctx.yaml'.format(key) if filename not in cache: ctx = {} if", "import yaml import json import pkg_resources import os env = Environment( loader=PackageLoader('kite_metrics', 'schemas'),", "ctx = yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] = ctx return cache[filename] def load_schema(key): filename =", "cache[filename] = ctx return cache[filename] def load_schema(key): filename = '{}.yaml.tmpl'.format(key) if filename not", "filename = '{}.ctx.yaml'.format(key) if filename not in cache: ctx = {} if _schema_exists(filename):", "os env = Environment( loader=PackageLoader('kite_metrics', 'schemas'), ) cache = {} def _schema_exists(filename): return", "= {'schema': load_schema(key)} if extra_ctx: ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx) try: cache[filename] = json.loads(rendered)", "load_json_schema(key, extra_ctx=None): filename = '{}.schema.json'.format(key) if filename not in cache: if _schema_exists(filename): cache[filename]", "cache[filename] = json.load(_schema_open(filename)) else: tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx = {'schema': load_schema(key)} if extra_ctx:", "def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename): return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key): filename", "'{}.schema.json'.format(key) if filename not in cache: if _schema_exists(filename): 
cache[filename] = json.load(_schema_open(filename)) else: tmpl_filename", "Environment( loader=PackageLoader('kite_metrics', 'schemas'), ) cache = {} def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def", "= '{}.schema.json.tmpl'.format(key) ctx = {'schema': load_schema(key)} if extra_ctx: ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx) try:", "if _schema_exists(filename): cache[filename] = json.load(_schema_open(filename)) else: tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx = {'schema': load_schema(key)}", "import Environment, PackageLoader, select_autoescape import yaml import json import pkg_resources import os env", "= '{}.schema.json'.format(key) if filename not in cache: if _schema_exists(filename): cache[filename] = json.load(_schema_open(filename)) else:", "filename = '{}.schema.json'.format(key) if filename not in cache: if _schema_exists(filename): cache[filename] = json.load(_schema_open(filename))", "ctx = load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return cache[filename] def load_json_schema(key, extra_ctx=None): filename", "load_schema(key): filename = '{}.yaml.tmpl'.format(key) if filename not in cache: ctx = load_context(key) cache[filename]", "= '{}.yaml.tmpl'.format(key) if filename not in cache: ctx = load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx),", "filename not in cache: ctx = {} if _schema_exists(filename): ctx = yaml.load(_schema_open(filename), yaml.FullLoader)", "{} if _schema_exists(filename): ctx = yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] = ctx return cache[filename] def", "from jinja2 import Environment, PackageLoader, select_autoescape import yaml import json import pkg_resources import", "if filename not in cache: ctx = {} if _schema_exists(filename): ctx = yaml.load(_schema_open(filename),", "= 
Environment( loader=PackageLoader('kite_metrics', 'schemas'), ) cache = {} def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename))", "filename not in cache: if _schema_exists(filename): cache[filename] = json.load(_schema_open(filename)) else: tmpl_filename = '{}.schema.json.tmpl'.format(key)", "if filename not in cache: if _schema_exists(filename): cache[filename] = json.load(_schema_open(filename)) else: tmpl_filename =", "load_schema(key)} if extra_ctx: ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx) try: cache[filename] = json.loads(rendered) except json.decoder.JSONDecodeError:", "load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return cache[filename] def load_json_schema(key, extra_ctx=None): filename = '{}.schema.json'.format(key)", "yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] = ctx return cache[filename] def load_schema(key): filename = '{}.yaml.tmpl'.format(key) if", "yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return cache[filename] def load_json_schema(key, extra_ctx=None): filename = '{}.schema.json'.format(key) if filename not", "ctx return cache[filename] def load_schema(key): filename = '{}.yaml.tmpl'.format(key) if filename not in cache:", "_schema_exists(filename): cache[filename] = json.load(_schema_open(filename)) else: tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx = {'schema': load_schema(key)} if", "env.get_template(tmpl_filename).render(ctx) try: cache[filename] = json.loads(rendered) except json.decoder.JSONDecodeError: print(\"Error decoding schema JSON:\\n{}\".format(rendered)) return cache[filename]", "= yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return cache[filename] def load_json_schema(key, extra_ctx=None): filename = '{}.schema.json'.format(key) if filename", "yaml.FullLoader) cache[filename] = ctx 
return cache[filename] def load_schema(key): filename = '{}.yaml.tmpl'.format(key) if filename", "not in cache: ctx = {} if _schema_exists(filename): ctx = yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename]", "pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key): filename = '{}.ctx.yaml'.format(key) if filename not in cache: ctx", "= ctx return cache[filename] def load_schema(key): filename = '{}.yaml.tmpl'.format(key) if filename not in", "_schema_exists(filename): ctx = yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] = ctx return cache[filename] def load_schema(key): filename", "cache[filename] def load_json_schema(key, extra_ctx=None): filename = '{}.schema.json'.format(key) if filename not in cache: if", "if extra_ctx: ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx) try: cache[filename] = json.loads(rendered) except json.decoder.JSONDecodeError: print(\"Error", "= '{}.ctx.yaml'.format(key) if filename not in cache: ctx = {} if _schema_exists(filename): ctx", "if _schema_exists(filename): ctx = yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] = ctx return cache[filename] def load_schema(key):", "def load_json_schema(key, extra_ctx=None): filename = '{}.schema.json'.format(key) if filename not in cache: if _schema_exists(filename):", "import json import pkg_resources import os env = Environment( loader=PackageLoader('kite_metrics', 'schemas'), ) cache", "select_autoescape import yaml import json import pkg_resources import os env = Environment( loader=PackageLoader('kite_metrics',", "'schemas/{}'.format(filename)) def _schema_open(filename): return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key): filename = '{}.ctx.yaml'.format(key) if filename", "import pkg_resources import os env = Environment( loader=PackageLoader('kite_metrics', 'schemas'), ) cache = {}", "extra_ctx=None): filename = 
'{}.schema.json'.format(key) if filename not in cache: if _schema_exists(filename): cache[filename] =", "else: tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx = {'schema': load_schema(key)} if extra_ctx: ctx.update(extra_ctx) rendered =", "in cache: ctx = load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return cache[filename] def load_json_schema(key,", "in cache: ctx = {} if _schema_exists(filename): ctx = yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] =", "'schemas'), ) cache = {} def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename): return", "'{}.yaml.tmpl'.format(key) if filename not in cache: ctx = load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader)", "= {} if _schema_exists(filename): ctx = yaml.load(_schema_open(filename), yaml.FullLoader) cache[filename] = ctx return cache[filename]", "cache[filename] def load_schema(key): filename = '{}.yaml.tmpl'.format(key) if filename not in cache: ctx =", "= {} def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename): return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def", "jinja2 import Environment, PackageLoader, select_autoescape import yaml import json import pkg_resources import os", ") cache = {} def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename): return pkg_resources.resource_stream('kite_metrics',", "return cache[filename] def load_json_schema(key, extra_ctx=None): filename = '{}.schema.json'.format(key) if filename not in cache:", "_schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename): return 
pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key): filename =", "cache = {} def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename): return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename))", "PackageLoader, select_autoescape import yaml import json import pkg_resources import os env = Environment(", "Environment, PackageLoader, select_autoescape import yaml import json import pkg_resources import os env =", "yaml import json import pkg_resources import os env = Environment( loader=PackageLoader('kite_metrics', 'schemas'), )", "json.load(_schema_open(filename)) else: tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx = {'schema': load_schema(key)} if extra_ctx: ctx.update(extra_ctx) rendered", "pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename): return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key): filename = '{}.ctx.yaml'.format(key) if", "in cache: if _schema_exists(filename): cache[filename] = json.load(_schema_open(filename)) else: tmpl_filename = '{}.schema.json.tmpl'.format(key) ctx =", "loader=PackageLoader('kite_metrics', 'schemas'), ) cache = {} def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics', 'schemas/{}'.format(filename)) def _schema_open(filename):", "filename not in cache: ctx = load_context(key) cache[filename] = yaml.load(env.get_template(filename).render(ctx), Loader=yaml.FullLoader) return cache[filename]", "def _schema_open(filename): return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key): filename = '{}.ctx.yaml'.format(key) if filename not", "= env.get_template(tmpl_filename).render(ctx) try: cache[filename] = json.loads(rendered) except json.decoder.JSONDecodeError: print(\"Error decoding schema 
JSON:\\n{}\".format(rendered)) return", "env = Environment( loader=PackageLoader('kite_metrics', 'schemas'), ) cache = {} def _schema_exists(filename): return pkg_resources.resource_exists('kite_metrics',", "'{}.ctx.yaml'.format(key) if filename not in cache: ctx = {} if _schema_exists(filename): ctx =", "rendered = env.get_template(tmpl_filename).render(ctx) try: cache[filename] = json.loads(rendered) except json.decoder.JSONDecodeError: print(\"Error decoding schema JSON:\\n{}\".format(rendered))", "extra_ctx: ctx.update(extra_ctx) rendered = env.get_template(tmpl_filename).render(ctx) try: cache[filename] = json.loads(rendered) except json.decoder.JSONDecodeError: print(\"Error decoding", "_schema_open(filename): return pkg_resources.resource_stream('kite_metrics', 'schemas/{}'.format(filename)) def load_context(key): filename = '{}.ctx.yaml'.format(key) if filename not in" ]
[ "(Cisco Systems) and others # # # # All rights reserved. This program", "== partition.get('host', ''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host',", "Systems) and others # # # # All rights reserved. This program and", "Systems), <NAME> (Cisco Systems) and others # # # # All rights reserved.", "self.osds = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"osd\" }) self.partitions = self.inv.find_items({ \"environment\": self.get_env(),", "under the terms of the Apache License, Version 2.0 # # which accompanies", "[] self.osds = [] self.disks = [] self.partitions = [] def setup(self, env,", "'None') == partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self, disk): #", "osd.get('host', 'None') == partition.get('host', ''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and", "host in self.hosts: if host.get('id', 'None') == osd.get('host', ''): self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data',", "Configuration() self.environment_type = self.configuration.get_env_type() def add_links(self): self.log.info(\"adding links of types: host-osd, osd-partition, partition-disk\")", "add_link_for_partitions(self, disk): # link_type: \"partition-disk\" for partition in self.partitions: if (partition.get('master_disk', 'None') ==", "partition.get('host', ''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host', 'None')", "osd): # link_type: \"host-osd\" metadata = osd.get('metadata', '') for host in self.hosts: if", "osd in self.osds: self.add_link_for_hosts(osd) for partition in self.partitions: self.add_link_for_osds(partition) for disk in self.disks:", "available at # # http://www.apache.org/licenses/LICENSE-2.0 # 
############################################################################### from base.utils.configuration import Configuration from base.utils.origins", "= osd.get('metadata', '') for host in self.hosts: if host.get('id', 'None') == osd.get('host', ''):", "class FindLinksForDisks(FindLinks): # per future ceph releases this might need revisions DB_PARTITION_PATH_ATT =", "== disk.get('name', '')) and ( partition.get('host', 'None') == disk.get('host', 'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\":", "osd-partition, partition-disk\") self.hosts = self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\": \"host\" }) self.osds = self.inv.find_items({", "}) self.partitions = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"partition\" }) self.disks = self.inv.find_items({ \"environment\":", "== osd.get('host', ''): self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data', '')}) def add_link_for_osds(self, partition): # link_type:", "super().__init__() self.environment_type = None self.hosts = [] self.osds = [] self.disks = []", "or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host',", "target.get('name', '')) source_label = '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''), source.get('id', '')) target_label = target.get('id',", "def setup(self, env, origin: Origin = None): super().setup(env, origin) self.configuration = Configuration() self.environment_type", "= [] def setup(self, env, origin: Origin = None): super().setup(env, origin) self.configuration =", "distribution, and is available at # # http://www.apache.org/licenses/LICENSE-2.0 # ############################################################################### from base.utils.configuration import", "if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) 
and ( osd.get('host', 'None') == partition.get('host', '')))", "self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"disk\" }) for osd in self.osds: self.add_link_for_hosts(osd) for partition", "env, origin: Origin = None): super().setup(env, origin) self.configuration = Configuration() self.environment_type = self.configuration.get_env_type()", "= target.get('id', '') extra = {\"source_label\": source_label, \"target_label\": target_label} if extra_att: extra.update(extra_att) self.link_items(source,", "host-osd, osd-partition, partition-disk\") self.hosts = self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\": \"host\" }) self.osds =", "and the accompanying materials # # are made available under the terms of", "def add_links(self): self.log.info(\"adding links of types: host-osd, osd-partition, partition-disk\") self.hosts = self.inv.find_items({ \"environment\":", "self.environment_type = None self.hosts = [] self.osds = [] self.disks = [] self.partitions", "== disk.get('host', 'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label', '')}) def add_links_with_specifics(self, source, target, extra_att=None):", "link_type: \"partition-disk\" for partition in self.partitions: if (partition.get('master_disk', 'None') == disk.get('name', '')) and", "( metadata.get('osd_data', 'None') == partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self,", "source.get('id', '')) target_label = target.get('id', '') extra = {\"source_label\": source_label, \"target_label\": target_label} if", "= None): super().setup(env, origin) self.configuration = Configuration() self.environment_type = self.configuration.get_env_type() def add_links(self): self.log.info(\"adding", "self.partitions: if (partition.get('master_disk', 'None') == disk.get('name', '')) and ( 
partition.get('host', 'None') == disk.get('host',", "in self.osds: metadata = osd.get('metadata', '') if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and", "# http://www.apache.org/licenses/LICENSE-2.0 # ############################################################################### from base.utils.configuration import Configuration from base.utils.origins import Origin from", "osd.get('metadata', '') for host in self.hosts: if host.get('id', 'None') == osd.get('host', ''): self.add_links_with_specifics(host,", "in self.hosts: if host.get('id', 'None') == osd.get('host', ''): self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data', '')})", "FindLinks class FindLinksForDisks(FindLinks): # per future ceph releases this might need revisions DB_PARTITION_PATH_ATT", "== partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self, disk): # link_type:", "def add_link_for_hosts(self, osd): # link_type: \"host-osd\" metadata = osd.get('metadata', '') for host in", "osd, extra_att={\"osd_data\": metadata.get('osd_data', '')}) def add_link_for_osds(self, partition): # link_type: \"osd-partition\" for osd in", "= [] self.partitions = [] def setup(self, env, origin: Origin = None): super().setup(env,", "# # <NAME> (Cisco Systems), <NAME> (Cisco Systems) and others # # #", "# # # All rights reserved. This program and the accompanying materials #", "= self.configuration.get_env_type() def add_links(self): self.log.info(\"adding links of types: host-osd, osd-partition, partition-disk\") self.hosts =", "<NAME> (Cisco Systems), <NAME> (Cisco Systems) and others # # # # All", "and others # # # # All rights reserved. 
This program and the", "releases this might need revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def __init__(self):", "<NAME> (Cisco Systems), # # <NAME> (Cisco Systems), <NAME> (Cisco Systems) and others", "made available under the terms of the Apache License, Version 2.0 # #", "License, Version 2.0 # # which accompanies this distribution, and is available at", "import Configuration from base.utils.origins import Origin from scan.link_finders.find_links import FindLinks class FindLinksForDisks(FindLinks): #", "self.disks = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"disk\" }) for osd in self.osds: self.add_link_for_hosts(osd)", "source.get('cvim_metro', ''), source.get('id', '')) target_label = target.get('id', '') extra = {\"source_label\": source_label, \"target_label\":", "'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def __init__(self): super().__init__() self.environment_type = None self.hosts = []", "This program and the accompanying materials # # are made available under the", "link_name = '{}-{}'.format(source.get('name', 'None'), target.get('name', '')) source_label = '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''), source.get('id',", "add_link_for_osds(self, partition): # link_type: \"osd-partition\" for osd in self.osds: metadata = osd.get('metadata', '')", "''): self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data', '')}) def add_link_for_osds(self, partition): # link_type: \"osd-partition\" for", "\"partition\" }) self.disks = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"disk\" }) for osd in", "BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def __init__(self): super().__init__() self.environment_type = None self.hosts = [] self.osds", "= self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"partition\" }) 
self.disks = self.inv.find_items({ \"environment\": self.get_env(), \"type\":", "''), source.get('id', '')) target_label = target.get('id', '') extra = {\"source_label\": source_label, \"target_label\": target_label}", "self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd): # link_type: \"host-osd\" metadata = osd.get('metadata', '') for host", "osd.get('host', 'None') == partition.get('host', ''))) or ( metadata.get('osd_data', 'None') == partition.get('mount_point', '')): self.add_links_with_specifics(osd,", "or ( metadata.get('osd_data', 'None') == partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def", "Origin = None): super().setup(env, origin) self.configuration = Configuration() self.environment_type = self.configuration.get_env_type() def add_links(self):", "= self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\": \"host\" }) self.osds = self.inv.find_items({ \"environment\": self.get_env(), \"type\":", "partition in self.partitions: if (partition.get('master_disk', 'None') == disk.get('name', '')) and ( partition.get('host', 'None')", "self.log.info(\"adding links of types: host-osd, osd-partition, partition-disk\") self.hosts = self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\":", "= 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def __init__(self): super().__init__() self.environment_type = None self.hosts =", "partition in self.partitions: self.add_link_for_osds(partition) for disk in self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd): #", "the terms of the Apache License, Version 2.0 # # which accompanies this", "= self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"disk\" }) for osd in self.osds: self.add_link_for_hosts(osd) for", "self.osds: self.add_link_for_hosts(osd) for partition in self.partitions: 
self.add_link_for_osds(partition) for disk in self.disks: self.add_link_for_partitions(disk) def", "\"type\": \"osd\" }) self.partitions = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"partition\" }) self.disks =", "def add_link_for_osds(self, partition): # link_type: \"osd-partition\" for osd in self.osds: metadata = osd.get('metadata',", "self.configuration.get_env_type() def add_links(self): self.log.info(\"adding links of types: host-osd, osd-partition, partition-disk\") self.hosts = self.inv.find_items({", "base.utils.origins import Origin from scan.link_finders.find_links import FindLinks class FindLinksForDisks(FindLinks): # per future ceph", "Copyright (c) 2017-2020 <NAME> (Cisco Systems), # # <NAME> (Cisco Systems), <NAME> (Cisco", "metadata = osd.get('metadata', '') if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host',", "self.configuration = Configuration() self.environment_type = self.configuration.get_env_type() def add_links(self): self.log.info(\"adding links of types: host-osd,", "self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"partition\" }) self.disks = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"disk\"", "revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def __init__(self): super().__init__() self.environment_type = None", "setup(self, env, origin: Origin = None): super().setup(env, origin) self.configuration = Configuration() self.environment_type =", "terms of the Apache License, Version 2.0 # # which accompanies this distribution,", "[] def setup(self, env, origin: Origin = None): super().setup(env, origin) self.configuration = Configuration()", "this might need revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def __init__(self): super().__init__()", "osd.get('metadata', '') if 
((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host', 'None') ==", "self.osds: metadata = osd.get('metadata', '') if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and (", "this distribution, and is available at # # http://www.apache.org/licenses/LICENSE-2.0 # ############################################################################### from base.utils.configuration", "self.osds = [] self.disks = [] self.partitions = [] def setup(self, env, origin:", "extra_att={\"osd_data\": metadata.get('osd_data', '')}) def add_link_for_osds(self, partition): # link_type: \"osd-partition\" for osd in self.osds:", "metadata.get('osd_data', '')}) def add_link_for_osds(self, partition): # link_type: \"osd-partition\" for osd in self.osds: metadata", "# link_type: \"partition-disk\" for partition in self.partitions: if (partition.get('master_disk', 'None') == disk.get('name', ''))", "== partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host', ''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT,", "in self.partitions: self.add_link_for_osds(partition) for disk in self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd): # link_type:", "}) self.osds = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"osd\" }) self.partitions = self.inv.find_items({ \"environment\":", "import FindLinks class FindLinksForDisks(FindLinks): # per future ceph releases this might need revisions", "\"host\" }) self.osds = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"osd\" }) self.partitions = self.inv.find_items({", "def add_link_for_partitions(self, disk): # link_type: \"partition-disk\" for partition in self.partitions: if (partition.get('master_disk', 'None')", "# link_type: \"osd-partition\" for osd in self.osds: metadata = osd.get('metadata', '') if ((metadata.get(self.DB_PARTITION_PATH_ATT,", "'')): 
self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self, disk): # link_type: \"partition-disk\" for", "is available at # # http://www.apache.org/licenses/LICENSE-2.0 # ############################################################################### from base.utils.configuration import Configuration from", "== partition.get('host', ''))) or ( metadata.get('osd_data', 'None') == partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\":", "'bluestore_bdev_partition_path' def __init__(self): super().__init__() self.environment_type = None self.hosts = [] self.osds = []", "(c) 2017-2020 <NAME> (Cisco Systems), # # <NAME> (Cisco Systems), <NAME> (Cisco Systems)", "# # # # All rights reserved. This program and the accompanying materials", "'None') == disk.get('name', '')) and ( partition.get('host', 'None') == disk.get('host', 'None')): self.add_links_with_specifics(partition, disk,", "'None') == osd.get('host', ''): self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data', '')}) def add_link_for_osds(self, partition): #", "'None') == partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host', ''))) or (", "add_link_for_hosts(self, osd): # link_type: \"host-osd\" metadata = osd.get('metadata', '') for host in self.hosts:", "and is available at # # http://www.apache.org/licenses/LICENSE-2.0 # ############################################################################### from base.utils.configuration import Configuration", "self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"osd\" }) self.partitions = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"partition\"", "in self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd): # link_type: \"host-osd\" metadata = osd.get('metadata', '')", 
"self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self, disk): # link_type: \"partition-disk\" for partition", "}) self.disks = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"disk\" }) for osd in self.osds:", "'{}-{}'.format(source.get('name', 'None'), target.get('name', '')) source_label = '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''), source.get('id', '')) target_label", "# # http://www.apache.org/licenses/LICENSE-2.0 # ############################################################################### from base.utils.configuration import Configuration from base.utils.origins import Origin", "in self.partitions: if (partition.get('master_disk', 'None') == disk.get('name', '')) and ( partition.get('host', 'None') ==", "Apache License, Version 2.0 # # which accompanies this distribution, and is available", "and ( osd.get('host', 'None') == partition.get('host', ''))) or ( metadata.get('osd_data', 'None') == partition.get('mount_point',", "from base.utils.configuration import Configuration from base.utils.origins import Origin from scan.link_finders.find_links import FindLinks class", "= None self.hosts = [] self.osds = [] self.disks = [] self.partitions =", "( partition.get('host', 'None') == disk.get('host', 'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label', '')}) def add_links_with_specifics(self,", "rights reserved. 
This program and the accompanying materials # # are made available", "'') if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host',", "and ( osd.get('host', 'None') == partition.get('host', ''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device',", "= Configuration() self.environment_type = self.configuration.get_env_type() def add_links(self): self.log.info(\"adding links of types: host-osd, osd-partition,", "self.configuration.env_name, \"type\": \"host\" }) self.osds = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"osd\" }) self.partitions", "partition-disk\") self.hosts = self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\": \"host\" }) self.osds = self.inv.find_items({ \"environment\":", "origin: Origin = None): super().setup(env, origin) self.configuration = Configuration() self.environment_type = self.configuration.get_env_type() def", "((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host', ''))) or", "'')) and ( osd.get('host', 'None') == partition.get('host', ''))) or ( metadata.get('osd_data', 'None') ==", "\"environment\": self.configuration.env_name, \"type\": \"host\" }) self.osds = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"osd\" })", "link_type: \"host-osd\" metadata = osd.get('metadata', '') for host in self.hosts: if host.get('id', 'None')", "self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label', '')}) def add_links_with_specifics(self, source, target, extra_att=None): link_name = '{}-{}'.format(source.get('name',", "self.hosts = self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\": \"host\" }) self.osds = self.inv.find_items({ \"environment\": self.get_env(),", "self.hosts: if host.get('id', 'None') == osd.get('host', 
''): self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data', '')}) def", "############################################################################### from base.utils.configuration import Configuration from base.utils.origins import Origin from scan.link_finders.find_links import FindLinks", "for partition in self.partitions: if (partition.get('master_disk', 'None') == disk.get('name', '')) and ( partition.get('host',", "accompanying materials # # are made available under the terms of the Apache", "''))) or ( metadata.get('osd_data', 'None') == partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')})", "\"environment\": self.get_env(), \"type\": \"partition\" }) self.disks = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"disk\" })", "self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data', '')}) def add_link_for_osds(self, partition): # link_type: \"osd-partition\" for osd", "# # which accompanies this distribution, and is available at # # http://www.apache.org/licenses/LICENSE-2.0", "extra = {\"source_label\": source_label, \"target_label\": target_label} if extra_att: extra.update(extra_att) self.link_items(source, target, link_name=link_name, extra_attributes=extra)", "self.get_env(), \"type\": \"osd\" }) self.partitions = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"partition\" }) self.disks", "metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host', ''))) or", "\"type\": \"host\" }) self.osds = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"osd\" }) self.partitions =", "at # # http://www.apache.org/licenses/LICENSE-2.0 # ############################################################################### from base.utils.configuration import Configuration from 
base.utils.origins import", "}) for osd in self.osds: self.add_link_for_hosts(osd) for partition in self.partitions: self.add_link_for_osds(partition) for disk", "which accompanies this distribution, and is available at # # http://www.apache.org/licenses/LICENSE-2.0 # ###############################################################################", "FindLinksForDisks(FindLinks): # per future ceph releases this might need revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path'", "<NAME> (Cisco Systems) and others # # # # All rights reserved. This", "self.hosts = [] self.osds = [] self.disks = [] self.partitions = [] def", "partition.get('label', '')}) def add_links_with_specifics(self, source, target, extra_att=None): link_name = '{}-{}'.format(source.get('name', 'None'), target.get('name', ''))", "'None') == disk.get('host', 'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label', '')}) def add_links_with_specifics(self, source, target,", "osd.get('host', ''): self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data', '')}) def add_link_for_osds(self, partition): # link_type: \"osd-partition\"", "from base.utils.origins import Origin from scan.link_finders.find_links import FindLinks class FindLinksForDisks(FindLinks): # per future", "# link_type: \"host-osd\" metadata = osd.get('metadata', '') for host in self.hosts: if host.get('id',", "super().setup(env, origin) self.configuration = Configuration() self.environment_type = self.configuration.get_env_type() def add_links(self): self.log.info(\"adding links of", "# <NAME> (Cisco Systems), <NAME> (Cisco Systems) and others # # # #", "import Origin from scan.link_finders.find_links import FindLinks class FindLinksForDisks(FindLinks): # per future ceph releases", "are made available under the terms of the Apache License, Version 2.0 #", "for host in self.hosts: if host.get('id', 'None') == osd.get('host', ''): 
self.add_links_with_specifics(host, osd, extra_att={\"osd_data\":", "target.get('id', '') extra = {\"source_label\": source_label, \"target_label\": target_label} if extra_att: extra.update(extra_att) self.link_items(source, target,", "'')}) def add_links_with_specifics(self, source, target, extra_att=None): link_name = '{}-{}'.format(source.get('name', 'None'), target.get('name', '')) source_label", "extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self, disk): # link_type: \"partition-disk\" for partition in self.partitions:", "target, extra_att=None): link_name = '{}-{}'.format(source.get('name', 'None'), target.get('name', '')) source_label = '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro',", "source_label = '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''), source.get('id', '')) target_label = target.get('id', '') extra", "= '{}-{}'.format(source.get('name', 'None'), target.get('name', '')) source_label = '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''), source.get('id', ''))", "accompanies this distribution, and is available at # # http://www.apache.org/licenses/LICENSE-2.0 # ############################################################################### from", "self.add_link_for_hosts(osd) for partition in self.partitions: self.add_link_for_osds(partition) for disk in self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self,", "''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host', 'None') ==", "\"environment\": self.get_env(), \"type\": \"disk\" }) for osd in self.osds: self.add_link_for_hosts(osd) for partition in", "link_type: \"osd-partition\" for osd in self.osds: metadata = osd.get('metadata', '') if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None')", "ceph releases this might need revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' 
BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def", "\"type\": \"disk\" }) for osd in self.osds: self.add_link_for_hosts(osd) for partition in self.partitions: self.add_link_for_osds(partition)", "'None') == partition.get('host', ''))) or ( metadata.get('osd_data', 'None') == partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition,", "\"osd\" }) self.partitions = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"partition\" }) self.disks = self.inv.find_items({", "partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self, disk): # link_type: \"partition-disk\" for partition in", "= self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"osd\" }) self.partitions = self.inv.find_items({ \"environment\": self.get_env(), \"type\":", "Origin from scan.link_finders.find_links import FindLinks class FindLinksForDisks(FindLinks): # per future ceph releases this", "None self.hosts = [] self.osds = [] self.disks = [] self.partitions = []", "= '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''), source.get('id', '')) target_label = target.get('id', '') extra =", "############################################################################### # Copyright (c) 2017-2020 <NAME> (Cisco Systems), # # <NAME> (Cisco Systems),", "extra_att=None): link_name = '{}-{}'.format(source.get('name', 'None'), target.get('name', '')) source_label = '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''),", "2.0 # # which accompanies this distribution, and is available at # #", "Configuration from base.utils.origins import Origin from scan.link_finders.find_links import FindLinks class FindLinksForDisks(FindLinks): # per", "the Apache License, Version 2.0 # # which accompanies this distribution, and is", "in self.osds: self.add_link_for_hosts(osd) for partition in self.partitions: self.add_link_for_osds(partition) for disk in 
self.disks: self.add_link_for_partitions(disk)", "'{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''), source.get('id', '')) target_label = target.get('id', '') extra = {\"source_label\":", "Systems), # # <NAME> (Cisco Systems), <NAME> (Cisco Systems) and others # #", "\"type\": \"partition\" }) self.disks = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"disk\" }) for osd", "def add_links_with_specifics(self, source, target, extra_att=None): link_name = '{}-{}'.format(source.get('name', 'None'), target.get('name', '')) source_label =", "add_links_with_specifics(self, source, target, extra_att=None): link_name = '{}-{}'.format(source.get('name', 'None'), target.get('name', '')) source_label = '{}-{}-{}'.format(source.get('cvim_region',", "# ############################################################################### from base.utils.configuration import Configuration from base.utils.origins import Origin from scan.link_finders.find_links import", "( osd.get('host', 'None') == partition.get('host', ''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device', ''))", "# # are made available under the terms of the Apache License, Version", "'') for host in self.hosts: if host.get('id', 'None') == osd.get('host', ''): self.add_links_with_specifics(host, osd,", "partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host', ''))) or ( metadata.get('osd_data', 'None')", "disk.get('host', 'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label', '')}) def add_links_with_specifics(self, source, target, extra_att=None): link_name", "if (partition.get('master_disk', 'None') == disk.get('name', '')) and ( partition.get('host', 'None') == disk.get('host', 'None')):", "= 'bluestore_bdev_partition_path' def __init__(self): super().__init__() self.environment_type = None self.hosts = [] self.osds =", "self.get_env(), \"type\": 
\"partition\" }) self.disks = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"disk\" }) for", "metadata = osd.get('metadata', '') for host in self.hosts: if host.get('id', 'None') == osd.get('host',", "__init__(self): super().__init__() self.environment_type = None self.hosts = [] self.osds = [] self.disks =", "add_links(self): self.log.info(\"adding links of types: host-osd, osd-partition, partition-disk\") self.hosts = self.inv.find_items({ \"environment\": self.configuration.env_name,", "disk, extra_att={\"partition_type\": partition.get('label', '')}) def add_links_with_specifics(self, source, target, extra_att=None): link_name = '{}-{}'.format(source.get('name', 'None'),", "( osd.get('host', 'None') == partition.get('host', ''))) or ( metadata.get('osd_data', 'None') == partition.get('mount_point', '')):", "All rights reserved. This program and the accompanying materials # # are made", "'')}) def add_link_for_partitions(self, disk): # link_type: \"partition-disk\" for partition in self.partitions: if (partition.get('master_disk',", "'None'), target.get('name', '')) source_label = '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''), source.get('id', '')) target_label =", "per future ceph releases this might need revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT =", "metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self, disk): # link_type: \"partition-disk\" for partition in self.partitions: if", "'')) and ( partition.get('host', 'None') == disk.get('host', 'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label', '')})", "source, target, extra_att=None): link_name = '{}-{}'.format(source.get('name', 'None'), target.get('name', '')) source_label = '{}-{}-{}'.format(source.get('cvim_region', ''),", "(Cisco Systems), <NAME> (Cisco Systems) and others # # # # All rights", "'')) and ( osd.get('host', 'None') == 
partition.get('host', ''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') ==", "others # # # # All rights reserved. This program and the accompanying", "self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\": \"host\" }) self.osds = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"osd\"", "Version 2.0 # # which accompanies this distribution, and is available at #", "reserved. This program and the accompanying materials # # are made available under", "== partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host', ''))) or ( metadata.get('osd_data',", "'None') == partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host', ''))) or ((", "extra_att={\"partition_type\": partition.get('label', '')}) def add_links_with_specifics(self, source, target, extra_att=None): link_name = '{}-{}'.format(source.get('name', 'None'), target.get('name',", "and ( partition.get('host', 'None') == disk.get('host', 'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label', '')}) def", "2017-2020 <NAME> (Cisco Systems), # # <NAME> (Cisco Systems), <NAME> (Cisco Systems) and", "for disk in self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd): # link_type: \"host-osd\" metadata =", "'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label', '')}) def add_links_with_specifics(self, source, target, extra_att=None): link_name =", "self.get_env(), \"type\": \"disk\" }) for osd in self.osds: self.add_link_for_hosts(osd) for partition in self.partitions:", "origin) self.configuration = Configuration() self.environment_type = self.configuration.get_env_type() def add_links(self): self.log.info(\"adding links of types:", "if host.get('id', 'None') == osd.get('host', ''): self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data', '')}) def 
add_link_for_osds(self,", "\"host-osd\" metadata = osd.get('metadata', '') for host in self.hosts: if host.get('id', 'None') ==", "# # All rights reserved. This program and the accompanying materials # #", "= [] self.disks = [] self.partitions = [] def setup(self, env, origin: Origin", "for partition in self.partitions: self.add_link_for_osds(partition) for disk in self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd):", "future ceph releases this might need revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path'", "osd in self.osds: metadata = osd.get('metadata', '') if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device', ''))", "scan.link_finders.find_links import FindLinks class FindLinksForDisks(FindLinks): # per future ceph releases this might need", "(( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host', '')))", "the accompanying materials # # are made available under the terms of the", "DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def __init__(self): super().__init__() self.environment_type = None self.hosts", "target_label = target.get('id', '') extra = {\"source_label\": source_label, \"target_label\": target_label} if extra_att: extra.update(extra_att)", "metadata.get('osd_data', 'None') == partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self, disk):", "\"partition-disk\" for partition in self.partitions: if (partition.get('master_disk', 'None') == disk.get('name', '')) and (", "base.utils.configuration import Configuration from base.utils.origins import Origin from scan.link_finders.find_links import FindLinks class FindLinksForDisks(FindLinks):", "[] self.disks = [] 
self.partitions = [] def setup(self, env, origin: Origin =", "''), source.get('cvim_metro', ''), source.get('id', '')) target_label = target.get('id', '') extra = {\"source_label\": source_label,", "self.environment_type = self.configuration.get_env_type() def add_links(self): self.log.info(\"adding links of types: host-osd, osd-partition, partition-disk\") self.hosts", "'')) target_label = target.get('id', '') extra = {\"source_label\": source_label, \"target_label\": target_label} if extra_att:", "\"osd-partition\" for osd in self.osds: metadata = osd.get('metadata', '') if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') ==", "partition.get('host', 'None') == disk.get('host', 'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label', '')}) def add_links_with_specifics(self, source,", "disk): # link_type: \"partition-disk\" for partition in self.partitions: if (partition.get('master_disk', 'None') == disk.get('name',", "= [] self.osds = [] self.disks = [] self.partitions = [] def setup(self,", "need revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def __init__(self): super().__init__() self.environment_type =", "'') extra = {\"source_label\": source_label, \"target_label\": target_label} if extra_att: extra.update(extra_att) self.link_items(source, target, link_name=link_name,", "self.partitions = self.inv.find_items({ \"environment\": self.get_env(), \"type\": \"partition\" }) self.disks = self.inv.find_items({ \"environment\": self.get_env(),", "= osd.get('metadata', '') if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and ( osd.get('host', 'None')", "'')) source_label = '{}-{}-{}'.format(source.get('cvim_region', ''), source.get('cvim_metro', ''), source.get('id', '')) target_label = target.get('id', '')", "\"environment\": self.get_env(), \"type\": \"osd\" }) self.partitions = self.inv.find_items({ 
\"environment\": self.get_env(), \"type\": \"partition\" })", "(partition.get('master_disk', 'None') == disk.get('name', '')) and ( partition.get('host', 'None') == disk.get('host', 'None')): self.add_links_with_specifics(partition,", "partition.get('device', '')) and ( osd.get('host', 'None') == partition.get('host', ''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None')", "None): super().setup(env, origin) self.configuration = Configuration() self.environment_type = self.configuration.get_env_type() def add_links(self): self.log.info(\"adding links", "from scan.link_finders.find_links import FindLinks class FindLinksForDisks(FindLinks): # per future ceph releases this might", "of the Apache License, Version 2.0 # # which accompanies this distribution, and", "materials # # are made available under the terms of the Apache License,", "links of types: host-osd, osd-partition, partition-disk\") self.hosts = self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\": \"host\"", "self.add_link_for_osds(partition) for disk in self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd): # link_type: \"host-osd\" metadata", "for osd in self.osds: metadata = osd.get('metadata', '') if ((metadata.get(self.DB_PARTITION_PATH_ATT, 'None') == partition.get('device',", "of types: host-osd, osd-partition, partition-disk\") self.hosts = self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\": \"host\" })", "# which accompanies this distribution, and is available at # # http://www.apache.org/licenses/LICENSE-2.0 #", "'')}) def add_link_for_osds(self, partition): # link_type: \"osd-partition\" for osd in self.osds: metadata =", "partition): # link_type: \"osd-partition\" for osd in self.osds: metadata = osd.get('metadata', '') if", "host.get('id', 'None') == osd.get('host', ''): self.add_links_with_specifics(host, osd, extra_att={\"osd_data\": metadata.get('osd_data', '')}) def add_link_for_osds(self, partition):", 
"program and the accompanying materials # # are made available under the terms", "\"disk\" }) for osd in self.osds: self.add_link_for_hosts(osd) for partition in self.partitions: self.add_link_for_osds(partition) for", "self.partitions: self.add_link_for_osds(partition) for disk in self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd): # link_type: \"host-osd\"", "'None') == partition.get('host', ''))) or (( metadata.get(self.BLK_PARTITION_PATH_ATT, 'None') == partition.get('device', '')) and (", "might need revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT = 'bluestore_bdev_partition_path' def __init__(self): super().__init__() self.environment_type", "self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd): # link_type: \"host-osd\" metadata = osd.get('metadata', '') for", "[] self.partitions = [] def setup(self, env, origin: Origin = None): super().setup(env, origin)", "types: host-osd, osd-partition, partition-disk\") self.hosts = self.inv.find_items({ \"environment\": self.configuration.env_name, \"type\": \"host\" }) self.osds", "http://www.apache.org/licenses/LICENSE-2.0 # ############################################################################### from base.utils.configuration import Configuration from base.utils.origins import Origin from scan.link_finders.find_links", "disk.get('name', '')) and ( partition.get('host', 'None') == disk.get('host', 'None')): self.add_links_with_specifics(partition, disk, extra_att={\"partition_type\": partition.get('label',", "partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore', '')}) def add_link_for_partitions(self, disk): # link_type: \"partition-disk\"", "self.disks = [] self.partitions = [] def setup(self, env, origin: Origin = None):", "# All rights reserved. 
This program and the accompanying materials # # are", "def __init__(self): super().__init__() self.environment_type = None self.hosts = [] self.osds = [] self.disks", "# are made available under the terms of the Apache License, Version 2.0", "for osd in self.osds: self.add_link_for_hosts(osd) for partition in self.partitions: self.add_link_for_osds(partition) for disk in", "partition.get('host', ''))) or ( metadata.get('osd_data', 'None') == partition.get('mount_point', '')): self.add_links_with_specifics(osd, partition, extra_att={\"osd_objectstore\": metadata.get('osd_objectstore',", "self.partitions = [] def setup(self, env, origin: Origin = None): super().setup(env, origin) self.configuration", "disk in self.disks: self.add_link_for_partitions(disk) def add_link_for_hosts(self, osd): # link_type: \"host-osd\" metadata = osd.get('metadata',", "available under the terms of the Apache License, Version 2.0 # # which", "# Copyright (c) 2017-2020 <NAME> (Cisco Systems), # # <NAME> (Cisco Systems), <NAME>", "# per future ceph releases this might need revisions DB_PARTITION_PATH_ATT = 'bluefs_db_partition_path' BLK_PARTITION_PATH_ATT", "(Cisco Systems), # # <NAME> (Cisco Systems), <NAME> (Cisco Systems) and others #" ]
[ "it should finish. self._shutdown.set() # Ensure pool is done. # The order of", "\"\"\"Shutdown queue and worker and make sure everything gets tidied up.\"\"\" # Ensure", "import config, elastic, models, scanner class ElasticQueueWorker: \"\"\"Create and manage a worker for", "): # Used to signal to the worker to exit. self._shutdown = mp.Event()", "for scanning files.\"\"\" def __init__( self, config_: config.ScannerSchema, elastic_q: queue_.Queue[models.File] ): # Used", "self.queue.close() self.queue.join() # Signal to worker it should finish. self._shutdown.set() # Ensure pool", "different to if you are using a worker. self._pl.close() self._pl.join() # Shutdown queue", "mp.Pool( # pylint: disable=consider-using-with processes=config_[\"scan_processes\"], initializer=scanner.worker, initargs=((self.queue, elastic_q, config_, self._shutdown)), ) def shutdown(self)", "self._pr.join() self._pr.close() # Shutdown queue completely. self.queue.join_thread() class ScanQueueWorker: \"\"\"Create and mannage queue", "shutdown(self) -> None: \"\"\"Shutdown queue and worker pool and make sure everything gets", "# Start process to to elastic tasks. self._pr = mp.Process( target=elastic.worker, args=((self.queue, config_,", "self._shutdown.set() # Ensure pool is done. # The order of these is different", ") self._pr.start() def shutdown(self) -> None: \"\"\"Shutdown queue and worker and make sure", "to signal to the worker to exit. self._shutdown = mp.Event() # Setup queue", "self._shutdown)), ) self._pr.start() def shutdown(self) -> None: \"\"\"Shutdown queue and worker and make", "is different to if you are using a worker. self._pl.close() self._pl.join() # Shutdown", "elastic tasks. self._pr = mp.Process( target=elastic.worker, args=((self.queue, config_, self._shutdown)), ) self._pr.start() def shutdown(self)", "exit. self._shutdown = mp.Event() # Setup queue of items for the scanner. self.queue:", "the queue. 
self._pl = mp.Pool( # pylint: disable=consider-using-with processes=config_[\"scan_processes\"], initializer=scanner.worker, initargs=((self.queue, elastic_q, config_,", "worker and make sure everything gets tidied up.\"\"\" # Ensure queue is done.", "-> None: \"\"\"Shutdown queue and worker and make sure everything gets tidied up.\"\"\"", "\"\"\"Create and manage a worker for sending files to es.\"\"\" def __init__(self, config_:", "import queue as queue_ from . import config, elastic, models, scanner class ElasticQueueWorker:", "worker for sending files to es.\"\"\" def __init__(self, config_: config.ScannerSchema): # Used to", ") # Start process to to elastic tasks. self._pr = mp.Process( target=elastic.worker, args=((self.queue,", "self._pr = mp.Process( target=elastic.worker, args=((self.queue, config_, self._shutdown)), ) self._pr.start() def shutdown(self) -> None:", "gets tidied up.\"\"\" # Ensure queue is done. self.queue.close() self.queue.join() # Signal to", "done. # The order of these is different to if you are using", "elastic, models, scanner class ElasticQueueWorker: \"\"\"Create and manage a worker for sending files", "should finish. self._shutdown.set() # Shut the process down. self._pr.join() self._pr.close() # Shutdown queue", "pool is done. # The order of these is different to if you", "__init__(self, config_: config.ScannerSchema): # Used to signal to the worker to exit. self._shutdown", "the worker to exit. self._shutdown = mp.Event() # Setup queue of items for", "# Ensure pool is done. # The order of these is different to", "self.queue.join_thread() class ScanQueueWorker: \"\"\"Create and mannage queue and worker pool for scanning files.\"\"\"", "self.queue: mp.JoinableQueue[scanner.ToScan] = mp.JoinableQueue( config_[\"scan_processes\"] * config_[\"queue_length_scale_factor\"] ) # Pool of workers to", "tasks. 
self._pr = mp.Process( target=elastic.worker, args=((self.queue, config_, self._shutdown)), ) self._pr.start() def shutdown(self) ->", "config_[\"queue_length_scale_factor\"] ) # Pool of workers to deal with the queue. self._pl =", "and worker pool and make sure everything gets tidied up.\"\"\" # Ensure queue", "queue and worker pool and make sure everything gets tidied up.\"\"\" # Ensure", "worker it should finish. self._shutdown.set() # Ensure pool is done. # The order", "to exit. self._shutdown = mp.Event() # Setup queue of items for elasticsearch. self.queue:", "as mp import queue as queue_ from . import config, elastic, models, scanner", "# Pool of workers to deal with the queue. self._pl = mp.Pool( #", "from . import config, elastic, models, scanner class ElasticQueueWorker: \"\"\"Create and manage a", "self._shutdown.set() # Shut the process down. self._pr.join() self._pr.close() # Shutdown queue completely. self.queue.join_thread()", "worker to exit. self._shutdown = mp.Event() # Setup queue of items for elasticsearch.", "worker pool and make sure everything gets tidied up.\"\"\" # Ensure queue is", "pool for scanning files.\"\"\" def __init__( self, config_: config.ScannerSchema, elastic_q: queue_.Queue[models.File] ): #", "self._shutdown = mp.Event() # Setup queue of items for the scanner. self.queue: mp.JoinableQueue[scanner.ToScan]", "a worker for sending files to es.\"\"\" def __init__(self, config_: config.ScannerSchema): # Used", "import multiprocessing as mp import queue as queue_ from . import config, elastic,", "and mannage queue and worker pool for scanning files.\"\"\" def __init__( self, config_:", "# Used to signal to the worker to exit. self._shutdown = mp.Event() #", "process down. self._pr.join() self._pr.close() # Shutdown queue completely. self.queue.join_thread() class ScanQueueWorker: \"\"\"Create and", "process to to elastic tasks. 
import multiprocessing as mp
import queue as queue_

from . import config, elastic, models, scanner


class ElasticQueueWorker:
    """Own a queue plus a single background process that ships files to elasticsearch."""

    def __init__(self, config_: config.ScannerSchema):
        # Event that shutdown() sets to tell the worker process to stop.
        self._shutdown = mp.Event()
        # Work queue feeding the elasticsearch worker.
        maxsize = config_["queue_length_scale_factor"]
        self.queue: mp.JoinableQueue[models.File] = mp.JoinableQueue(maxsize)
        # Background process that drains the queue into elasticsearch.
        self._pr = mp.Process(
            target=elastic.worker,
            args=(self.queue, config_, self._shutdown),
        )
        self._pr.start()

    def shutdown(self) -> None:
        """Drain the queue, stop the worker process, and release all resources."""
        # Wait until every queued item has been processed.
        self.queue.close()
        self.queue.join()
        # Ask the worker to exit, then reap the process.
        self._shutdown.set()
        self._pr.join()
        self._pr.close()
        # Finally tear down the queue's feeder thread.
        self.queue.join_thread()


class ScanQueueWorker:
    """Own a queue plus a pool of scanner workers that consume it."""

    def __init__(
        self, config_: config.ScannerSchema, elastic_q: queue_.Queue[models.File]
    ):
        # Event that shutdown() sets to tell the pool workers to stop.
        self._shutdown = mp.Event()
        # Work queue feeding the scanner pool; sized so every worker stays busy.
        workers = config_["scan_processes"]
        self.queue: mp.JoinableQueue[scanner.ToScan] = mp.JoinableQueue(
            workers * config_["queue_length_scale_factor"]
        )
        # Worker pool consuming the queue; closed explicitly in shutdown().
        self._pl = mp.Pool(  # pylint: disable=consider-using-with
            processes=workers,
            initializer=scanner.worker,
            initargs=(self.queue, elastic_q, config_, self._shutdown),
        )

    def shutdown(self) -> None:
        """Drain the queue, stop the worker pool, and release all resources."""
        # Wait until every queued item has been processed.
        self.queue.close()
        self.queue.join()
        # Ask the workers to exit, then reap the pool.
        self._shutdown.set()
        # NOTE: close/join ordering here differs from the single-process case above.
        self._pl.close()
        self._pl.join()
        # Finally tear down the queue's feeder thread.
        self.queue.join_thread()
# @Author: <NAME> (@ComicSphinx)
from database.DatabaseUtilities import DatabaseUtilities as dbu
from datetime import datetime as dt


class Plot():
    """Persist the daily 'Sleeping' and unknown ('?') minute totals.

    NOTE(review): methods look written for class-level invocation — the class
    itself is passed as ``self`` (mirroring the ``dbu.method(dbu, ...)`` usage
    throughout), so the explicit ``self`` arguments in the internal calls are
    intentionally kept.
    """

    str_sleeping = "Sleeping"   # label for the sleep row
    str_unknown = "?"           # label for unaccounted time
    int_maxMinutes = 1440       # minutes in one day
    int_sleep = 480             # default sleep allowance (8 hours)

    def addData(self, minutes):
        """Insert today's sleep and unknown-time rows if they do not exist yet.

        ``minutes`` is the iterable of already-logged minute counts for today.
        """
        # Snapshot the clock once so both queries agree even across midnight
        # (the original re-read dt.now() per field, risking a mixed date).
        now = dt.now()
        select = dbu.buildSelect(dbu, now.year, now.month, now.day, self.str_sleeping)
        if (dbu.dataIsNotExist(dbu, select)):
            self.addSleep(self)
        select = dbu.buildSelect(dbu, now.year, now.month, now.day, self.str_unknown)
        if (dbu.dataIsNotExist(dbu, select)):
            self.addUnknown(self, minutes)

    def addSleep(self):
        """Insert the default sleep row for today."""
        insert = dbu.buildInsert(dbu, self.str_sleeping, self.int_sleep)
        dbu.executeCommand(dbu, insert)

    def addUnknown(self, minutes):
        """Insert the unknown-time row: whatever part of the day is unaccounted for."""
        remaining = self.int_maxMinutes - sum(minutes)
        insert = dbu.buildInsert(dbu, self.str_unknown, remaining)
        dbu.executeCommand(dbu, insert)
[ "application through the API'), ('api_delete_onadateapplication', 'Can delete application through the API'), ('api_read_own_onadateapplication', 'Can", "'On a date application', 'verbose_name_plural': 'On a date application', 'permissions': (('api_read_onadateapplication', 'Can view", "# -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2020-11-02 11:30", "'verbose_name_plural': 'On a date application', 'permissions': (('api_read_onadateapplication', 'Can view application through the API'),", "'Can add application through the API'), ('api_change_periodapplication', 'Can change application through the API'),", "] operations = [ migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False,", "the API'), ('api_add_periodapplication', 'Can add application through the API'), ('api_change_periodapplication', 'Can change application", "to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)), ], options={ 'verbose_name': 'Period application', 'verbose_name_plural': 'Period application', 'permissions':", "add application through the API'), ('api_change_periodapplication', 'Can change application through the API'), ('api_delete_periodapplication',", "1.11.17 on 2020-11-02 11:30 from __future__ import unicode_literals from django.db import migrations, models", "API'), ('api_change_own_onadateapplication', 'Can change own application through the API'), ('api_delete_own_onadateapplication', 'Can delete own", "# Generated by Django 1.11.17 on 2020-11-02 11:30 from __future__ import unicode_literals from", "'permissions': (('api_read_onadateapplication', 'Can view application through the API'), ('api_add_onadateapplication', 'Can add application through", "__future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class 
Migration(migrations.Migration): dependencies", "through the API'), ('api_delete_onadateapplication', 'Can delete application through the API'), ('api_read_own_onadateapplication', 'Can view", "the API'), ('api_read_own_onadateapplication', 'Can view own application through the API'), ('api_add_own_onadateapplication', 'Can add", "Migration(migrations.Migration): dependencies = [ ('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ] operations = [ migrations.CreateModel(", "add own application through the API'), ('api_change_own_periodapplication', 'Can change own application through the", "application through the API'), ('api_change_onadateapplication', 'Can change application through the API'), ('api_delete_onadateapplication', 'Can", "django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ] operations =", "bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ('current_period',", "models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ]", "API'), ('api_delete_onadateapplication', 'Can delete application through the API'), ('api_read_own_onadateapplication', 'Can view own application", "'On a date application', 'permissions': (('api_read_onadateapplication', 'Can view application through the API'), ('api_add_onadateapplication',", "'Can change application through the API'), ('api_delete_onadateapplication', 'Can delete application through the API'),", "the API'), ('api_add_onadateapplication', 'Can add application through the API'), ('api_change_onadateapplication', 
'Can change application", "('api_add_own_periodapplication', 'Can add own application through the API'), ('api_change_own_periodapplication', 'Can change own application", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('activities', '0027_contributionvalue'), ('time_based',", "the API'), ('api_delete_own_periodapplication', 'Can delete own application through the API')), }, bases=('activities.contribution',), ),", "'Can delete application through the API'), ('api_read_own_onadateapplication', 'Can view own application through the", "application', 'permissions': (('api_read_onadateapplication', 'Can view application through the API'), ('api_add_onadateapplication', 'Can add application", "API'), ('api_delete_own_onadateapplication', 'Can delete own application through the API')), }, bases=('activities.contribution',), ), migrations.CreateModel(", "primary_key=True, serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)), ], options={ 'verbose_name': 'Period application', 'verbose_name_plural': 'Period", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('activities', '0027_contributionvalue'),", "(('api_read_onadateapplication', 'Can view application through the API'), ('api_add_onadateapplication', 'Can add application through the", "API'), ('api_add_own_onadateapplication', 'Can add own application through the API'), ('api_change_own_onadateapplication', 'Can change own", "('api_read_own_onadateapplication', 'Can view own application through the API'), ('api_add_own_onadateapplication', 'Can add own application", "through the API'), ('api_change_own_onadateapplication', 'Can change own application through the API'), ('api_delete_own_onadateapplication', 'Can", "the API'), ('api_change_periodapplication', 'Can change application through the API'), 
('api_delete_periodapplication', 'Can delete application", "date application', 'permissions': (('api_read_onadateapplication', 'Can view application through the API'), ('api_add_onadateapplication', 'Can add", "import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ] operations", "('api_change_onadateapplication', 'Can change application through the API'), ('api_delete_onadateapplication', 'Can delete application through the", "('api_add_onadateapplication', 'Can add application through the API'), ('api_change_onadateapplication', 'Can change application through the", "('api_delete_onadateapplication', 'Can delete application through the API'), ('api_read_own_onadateapplication', 'Can view own application through", "the API'), ('api_delete_onadateapplication', 'Can delete application through the API'), ('api_read_own_onadateapplication', 'Can view own", "'Can delete application through the API'), ('api_read_own_periodapplication', 'Can view own application through the", "('api_change_own_onadateapplication', 'Can change own application through the API'), ('api_delete_own_onadateapplication', 'Can delete own application", "migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)),", "'Can add own application through the API'), ('api_change_own_periodapplication', 'Can change own application through", "application through the API'), ('api_delete_own_onadateapplication', 'Can delete own application through the API')), },", "the API'), ('api_add_own_onadateapplication', 'Can add own application through the API'), ('api_change_own_onadateapplication', 'Can change", "from __future__ import unicode_literals 
from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "through the API'), ('api_change_onadateapplication', 'Can change application through the API'), ('api_delete_onadateapplication', 'Can delete", "Django 1.11.17 on 2020-11-02 11:30 from __future__ import unicode_literals from django.db import migrations,", "('api_delete_own_periodapplication', 'Can delete own application through the API')), }, bases=('activities.contribution',), ), migrations.RemoveField( model_name='application',", "delete application through the API'), ('api_read_own_onadateapplication', 'Can view own application through the API'),", "('api_delete_own_onadateapplication', 'Can delete own application through the API')), }, bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication',", "unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [", "API'), ('api_add_periodapplication', 'Can add application through the API'), ('api_change_periodapplication', 'Can change application through", "delete application through the API'), ('api_read_own_periodapplication', 'Can view own application through the API'),", "own application through the API'), ('api_change_own_periodapplication', 'Can change own application through the API'),", "('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ] operations = [ migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True,", "'Can delete own application through the API')), }, bases=('activities.contribution',), ), migrations.RemoveField( model_name='application', name='current_period',", "application through the API'), ('api_delete_periodapplication', 'Can delete application through the API'), ('api_read_own_periodapplication', 'Can", "('api_delete_periodapplication', 'Can delete application 
through the API'), ('api_read_own_periodapplication', 'Can view own application through", "options={ 'verbose_name': 'On a date application', 'verbose_name_plural': 'On a date application', 'permissions': (('api_read_onadateapplication',", "a date application', 'permissions': (('api_read_onadateapplication', 'Can view application through the API'), ('api_add_onadateapplication', 'Can", "change application through the API'), ('api_delete_onadateapplication', 'Can delete application through the API'), ('api_read_own_onadateapplication',", "models.DateField(blank=True, null=True)), ], options={ 'verbose_name': 'Period application', 'verbose_name_plural': 'Period application', 'permissions': (('api_read_periodapplication', 'Can", "API'), ('api_add_onadateapplication', 'Can add application through the API'), ('api_change_onadateapplication', 'Can change application through", "through the API'), ('api_delete_own_periodapplication', 'Can delete own application through the API')), }, bases=('activities.contribution',),", "delete own application through the API')), }, bases=('activities.contribution',), ), migrations.RemoveField( model_name='application', name='current_period', ),", "through the API'), ('api_read_own_periodapplication', 'Can view own application through the API'), ('api_add_own_periodapplication', 'Can", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'),", "through the API')), }, bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,", "[ ('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ] operations = [ migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr',", "view own application through the 
API'), ('api_add_own_onadateapplication', 'Can add own application through the", "[ migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ], options={", "-*- # Generated by Django 1.11.17 on 2020-11-02 11:30 from __future__ import unicode_literals", "null=True)), ], options={ 'verbose_name': 'Period application', 'verbose_name_plural': 'Period application', 'permissions': (('api_read_periodapplication', 'Can view", "coding: utf-8 -*- # Generated by Django 1.11.17 on 2020-11-02 11:30 from __future__", "view application through the API'), ('api_add_onadateapplication', 'Can add application through the API'), ('api_change_onadateapplication',", "'Can add own application through the API'), ('api_change_own_onadateapplication', 'Can change own application through", "'Can change application through the API'), ('api_delete_periodapplication', 'Can delete application through the API'),", "on 2020-11-02 11:30 from __future__ import unicode_literals from django.db import migrations, models import", "('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ], options={ 'verbose_name': 'On a date", "'Can view application through the API'), ('api_add_onadateapplication', 'Can add application through the API'),", "API'), ('api_change_onadateapplication', 'Can change application through the API'), ('api_delete_onadateapplication', 'Can delete application through", "own application through the API'), ('api_add_own_onadateapplication', 'Can add own application through the API'),", "delete own application through the API')), }, bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr',", 
"'permissions': (('api_read_periodapplication', 'Can view application through the API'), ('api_add_periodapplication', 'Can add application through", "'Can change own application through the API'), ('api_delete_own_periodapplication', 'Can delete own application through", "application through the API'), ('api_read_own_periodapplication', 'Can view own application through the API'), ('api_add_own_periodapplication',", "API'), ('api_read_own_periodapplication', 'Can view own application through the API'), ('api_add_own_periodapplication', 'Can add own", "models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)), ], options={ 'verbose_name': 'Period", "change own application through the API'), ('api_delete_own_onadateapplication', 'Can delete own application through the", "API'), ('api_change_periodapplication', 'Can change application through the API'), ('api_delete_periodapplication', 'Can delete application through", "class Migration(migrations.Migration): dependencies = [ ('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ] operations = [", "fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ], options={ 'verbose_name': 'On a", "serialize=False, to='activities.Contribution')), ], options={ 'verbose_name': 'On a date application', 'verbose_name_plural': 'On a date", "application', 'verbose_name_plural': 'Period application', 'permissions': (('api_read_periodapplication', 'Can view application through the API'), ('api_add_periodapplication',", "utf-8 -*- # Generated by Django 1.11.17 on 2020-11-02 11:30 from __future__ import", "application', 'permissions': (('api_read_periodapplication', 'Can view application through 
the API'), ('api_add_periodapplication', 'Can add application", "the API'), ('api_read_own_periodapplication', 'Can view own application through the API'), ('api_add_own_periodapplication', 'Can add", "('api_read_own_periodapplication', 'Can view own application through the API'), ('api_add_own_periodapplication', 'Can add own application", "application through the API'), ('api_delete_own_periodapplication', 'Can delete own application through the API')), },", "application through the API'), ('api_read_own_onadateapplication', 'Can view own application through the API'), ('api_add_own_onadateapplication',", "('api_add_own_onadateapplication', 'Can add own application through the API'), ('api_change_own_onadateapplication', 'Can change own application", "options={ 'verbose_name': 'Period application', 'verbose_name_plural': 'Period application', 'permissions': (('api_read_periodapplication', 'Can view application through", "on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)), ], options={ 'verbose_name': 'Period application',", "primary_key=True, serialize=False, to='activities.Contribution')), ], options={ 'verbose_name': 'On a date application', 'verbose_name_plural': 'On a", "application through the API')), }, bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE,", "11:30 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class", "through the API'), ('api_change_periodapplication', 'Can change application through the API'), ('api_delete_periodapplication', 'Can delete", "application through the API'), ('api_change_own_onadateapplication', 'Can change own application through the API'), 
('api_delete_own_onadateapplication',", "change own application through the API'), ('api_delete_own_periodapplication', 'Can delete own application through the", "serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)), ], options={ 'verbose_name': 'Period application', 'verbose_name_plural': 'Period application',", "own application through the API')), }, bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True,", "'verbose_name': 'On a date application', 'verbose_name_plural': 'On a date application', 'permissions': (('api_read_onadateapplication', 'Can", "fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)), ], options={", "through the API'), ('api_add_periodapplication', 'Can add application through the API'), ('api_change_periodapplication', 'Can change", "add own application through the API'), ('api_change_own_onadateapplication', 'Can change own application through the", "'Can delete own application through the API')), }, bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication', fields=[", "API'), ('api_add_own_periodapplication', 'Can add own application through the API'), ('api_change_own_periodapplication', 'Can change own", "], options={ 'verbose_name': 'Period application', 'verbose_name_plural': 'Period application', 'permissions': (('api_read_periodapplication', 'Can view application", "on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ], options={ 'verbose_name': 'On a date application', 'verbose_name_plural':", "from django.db import migrations, models import django.db.models.deletion 
class Migration(migrations.Migration): dependencies = [ ('activities',", "through the API'), ('api_read_own_onadateapplication', 'Can view own application through the API'), ('api_add_own_onadateapplication', 'Can", "add application through the API'), ('api_change_onadateapplication', 'Can change application through the API'), ('api_delete_onadateapplication',", "through the API'), ('api_add_own_onadateapplication', 'Can add own application through the API'), ('api_change_own_onadateapplication', 'Can", "dependencies = [ ('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ] operations = [ migrations.CreateModel( name='OnADateApplication',", "application', 'verbose_name_plural': 'On a date application', 'permissions': (('api_read_onadateapplication', 'Can view application through the", "], options={ 'verbose_name': 'On a date application', 'verbose_name_plural': 'On a date application', 'permissions':", "application through the API'), ('api_add_own_onadateapplication', 'Can add own application through the API'), ('api_change_own_onadateapplication',", "own application through the API'), ('api_delete_own_onadateapplication', 'Can delete own application through the API')),", "}, bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')),", "the API'), ('api_delete_own_onadateapplication', 'Can delete own application through the API')), }, bases=('activities.contribution',), ),", "'verbose_name': 'Period application', 'verbose_name_plural': 'Period application', 'permissions': (('api_read_periodapplication', 'Can view application through the", "through the API'), ('api_add_own_periodapplication', 'Can add own application through the API'), ('api_change_own_periodapplication', 'Can", "own application through the 
API'), ('api_add_own_periodapplication', 'Can add own application through the API'),", "'Can view own application through the API'), ('api_add_own_periodapplication', 'Can add own application through", "= [ ('activities', '0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ] operations = [ migrations.CreateModel( name='OnADateApplication', fields=[", "-*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2020-11-02 11:30 from", "'0019_auto_20201030_1317'), ] operations = [ migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True,", "migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ], options={ 'verbose_name':", "API'), ('api_delete_own_periodapplication', 'Can delete own application through the API')), }, bases=('activities.contribution',), ), migrations.RemoveField(", "own application through the API'), ('api_change_own_onadateapplication', 'Can change own application through the API'),", "date application', 'verbose_name_plural': 'On a date application', 'permissions': (('api_read_onadateapplication', 'Can view application through", "by Django 1.11.17 on 2020-11-02 11:30 from __future__ import unicode_literals from django.db import", "('api_change_own_periodapplication', 'Can change own application through the API'), ('api_delete_own_periodapplication', 'Can delete own application", "operations = [ migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')),", "'Period application', 'permissions': 
(('api_read_periodapplication', 'Can view application through the API'), ('api_add_periodapplication', 'Can add", "('api_change_periodapplication', 'Can change application through the API'), ('api_delete_periodapplication', 'Can delete application through the", "the API'), ('api_change_own_onadateapplication', 'Can change own application through the API'), ('api_delete_own_onadateapplication', 'Can delete", "application through the API'), ('api_add_periodapplication', 'Can add application through the API'), ('api_change_periodapplication', 'Can", "('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)), ], options={ 'verbose_name':", "the API'), ('api_delete_periodapplication', 'Can delete application through the API'), ('api_read_own_periodapplication', 'Can view own", "('current_period', models.DateField(blank=True, null=True)), ], options={ 'verbose_name': 'Period application', 'verbose_name_plural': 'Period application', 'permissions': (('api_read_periodapplication',", "view application through the API'), ('api_add_periodapplication', 'Can add application through the API'), ('api_change_periodapplication',", "through the API'), ('api_delete_own_onadateapplication', 'Can delete own application through the API')), }, bases=('activities.contribution',),", "('api_add_periodapplication', 'Can add application through the API'), ('api_change_periodapplication', 'Can change application through the", "application through the API'), ('api_change_periodapplication', 'Can change application through the API'), ('api_delete_periodapplication', 'Can", "import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies =", "through the API'), ('api_delete_periodapplication', 'Can delete application 
through the API'), ('api_read_own_periodapplication', 'Can view", "the API'), ('api_add_own_periodapplication', 'Can add own application through the API'), ('api_change_own_periodapplication', 'Can change", "API'), ('api_read_own_onadateapplication', 'Can view own application through the API'), ('api_add_own_onadateapplication', 'Can add own", "through the API'), ('api_change_own_periodapplication', 'Can change own application through the API'), ('api_delete_own_periodapplication', 'Can", "change application through the API'), ('api_delete_periodapplication', 'Can delete application through the API'), ('api_read_own_periodapplication',", "API'), ('api_change_own_periodapplication', 'Can change own application through the API'), ('api_delete_own_periodapplication', 'Can delete own", "through the API'), ('api_add_onadateapplication', 'Can add application through the API'), ('api_change_onadateapplication', 'Can change", "= [ migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ],", "2020-11-02 11:30 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion", "the API')), }, bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True,", "models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ], options={ 'verbose_name': 'On a date application',", "('time_based', '0019_auto_20201030_1317'), ] operations = [ migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, 
on_delete=django.db.models.deletion.CASCADE, parent_link=True,", "own application through the API'), ('api_delete_own_periodapplication', 'Can delete own application through the API')),", "application through the API'), ('api_add_onadateapplication', 'Can add application through the API'), ('api_change_onadateapplication', 'Can", "'Can change own application through the API'), ('api_delete_own_onadateapplication', 'Can delete own application through", "API')), }, bases=('activities.contribution',), ), migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False,", "name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)), ],", "name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ], options={ 'verbose_name': 'On", "Generated by Django 1.11.17 on 2020-11-02 11:30 from __future__ import unicode_literals from django.db", "), migrations.CreateModel( name='PeriodApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True,", "'verbose_name_plural': 'Period application', 'permissions': (('api_read_periodapplication', 'Can view application through the API'), ('api_add_periodapplication', 'Can", "application through the API'), ('api_change_own_periodapplication', 'Can change own application through the API'), 
('api_delete_own_periodapplication',", "'0027_contributionvalue'), ('time_based', '0019_auto_20201030_1317'), ] operations = [ migrations.CreateModel( name='OnADateApplication', fields=[ ('contribution_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE,", "'Can view own application through the API'), ('api_add_own_onadateapplication', 'Can add own application through", "(('api_read_periodapplication', 'Can view application through the API'), ('api_add_periodapplication', 'Can add application through the", "'Period application', 'verbose_name_plural': 'Period application', 'permissions': (('api_read_periodapplication', 'Can view application through the API'),", "parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ('current_period', models.DateField(blank=True, null=True)), ], options={ 'verbose_name': 'Period application', 'verbose_name_plural':", "to='activities.Contribution')), ], options={ 'verbose_name': 'On a date application', 'verbose_name_plural': 'On a date application',", "the API'), ('api_change_onadateapplication', 'Can change application through the API'), ('api_delete_onadateapplication', 'Can delete application", "own application through the API')), }, bases=('activities.contribution',), ), migrations.RemoveField( model_name='application', name='current_period', ), ]", "application through the API'), ('api_add_own_periodapplication', 'Can add own application through the API'), ('api_change_own_periodapplication',", "'Can view application through the API'), ('api_add_periodapplication', 'Can add application through the API'),", "the API'), ('api_change_own_periodapplication', 'Can change own application through the API'), ('api_delete_own_periodapplication', 'Can delete", "parent_link=True, primary_key=True, serialize=False, to='activities.Contribution')), ], options={ 'verbose_name': 'On a date application', 'verbose_name_plural': 'On", "'Can add application through the API'), 
('api_change_onadateapplication', 'Can change application through the API'),", "view own application through the API'), ('api_add_own_periodapplication', 'Can add own application through the", "API'), ('api_delete_periodapplication', 'Can delete application through the API'), ('api_read_own_periodapplication', 'Can view own application", "a date application', 'verbose_name_plural': 'On a date application', 'permissions': (('api_read_onadateapplication', 'Can view application" ]
[ "LevelDB14Interface, ) class LevelDB15Interface(LevelDB14Interface): def __init__(self): LevelDB14Interface.__init__(self) self.features[\"chunk_version\"] = 15 INTERFACE_CLASS = LevelDB15Interface", "annotations from amulet.world_interface.chunk.interfaces.leveldb.leveldb_14.interface import ( LevelDB14Interface, ) class LevelDB15Interface(LevelDB14Interface): def __init__(self): LevelDB14Interface.__init__(self) self.features[\"chunk_version\"]", "__future__ import annotations from amulet.world_interface.chunk.interfaces.leveldb.leveldb_14.interface import ( LevelDB14Interface, ) class LevelDB15Interface(LevelDB14Interface): def __init__(self):", "import annotations from amulet.world_interface.chunk.interfaces.leveldb.leveldb_14.interface import ( LevelDB14Interface, ) class LevelDB15Interface(LevelDB14Interface): def __init__(self): LevelDB14Interface.__init__(self)", "from amulet.world_interface.chunk.interfaces.leveldb.leveldb_14.interface import ( LevelDB14Interface, ) class LevelDB15Interface(LevelDB14Interface): def __init__(self): LevelDB14Interface.__init__(self) self.features[\"chunk_version\"] =", "( LevelDB14Interface, ) class LevelDB15Interface(LevelDB14Interface): def __init__(self): LevelDB14Interface.__init__(self) self.features[\"chunk_version\"] = 15 INTERFACE_CLASS =", "amulet.world_interface.chunk.interfaces.leveldb.leveldb_14.interface import ( LevelDB14Interface, ) class LevelDB15Interface(LevelDB14Interface): def __init__(self): LevelDB14Interface.__init__(self) self.features[\"chunk_version\"] = 15", "import ( LevelDB14Interface, ) class LevelDB15Interface(LevelDB14Interface): def __init__(self): LevelDB14Interface.__init__(self) self.features[\"chunk_version\"] = 15 INTERFACE_CLASS", "from __future__ import annotations from amulet.world_interface.chunk.interfaces.leveldb.leveldb_14.interface import ( LevelDB14Interface, ) class LevelDB15Interface(LevelDB14Interface): def" ]
[ "start_date = today - datetime.timedelta(days=2) end_date = today - datetime.timedelta(days=1) articles = ArxivScraper().search(start_date=start_date,", "today = datetime.date.today() start_date = today - datetime.timedelta(days=2) end_date = today - datetime.timedelta(days=1)", "= today - datetime.timedelta(days=1) articles = ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for article in articles:", "datetime.date.today() start_date = today - datetime.timedelta(days=2) end_date = today - datetime.timedelta(days=1) articles =", "<reponame>knishioka/arxiv-bot import datetime from arxiv_bot.arxiv_scraper import ArxivScraper from arxiv_bot.translator import translate def main():", "today - datetime.timedelta(days=1) articles = ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for article in articles: print(article[\"itemTitle\"])", "datetime.timedelta(days=2) end_date = today - datetime.timedelta(days=1) articles = ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for article", "for article in articles: print(article[\"itemTitle\"]) print(\", \".join(article[\"itemAuthors\"])) print(translate(article[\"itemSummary\"])) print(f'https://arxiv.org/abs/{article[\"id\"]}') if __name__ == \"__main__\":", "- datetime.timedelta(days=2) end_date = today - datetime.timedelta(days=1) articles = ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for", "end_date=end_date, category_id=\"cs.AI\") for article in articles: print(article[\"itemTitle\"]) print(\", \".join(article[\"itemAuthors\"])) print(translate(article[\"itemSummary\"])) print(f'https://arxiv.org/abs/{article[\"id\"]}') if __name__", "ArxivScraper from arxiv_bot.translator import translate def main(): \"\"\"List updated articles.\"\"\" today = datetime.date.today()", "from arxiv_bot.arxiv_scraper import ArxivScraper from arxiv_bot.translator import translate def 
main(): \"\"\"List updated articles.\"\"\"", "\"\"\"List updated articles.\"\"\" today = datetime.date.today() start_date = today - datetime.timedelta(days=2) end_date =", "= datetime.date.today() start_date = today - datetime.timedelta(days=2) end_date = today - datetime.timedelta(days=1) articles", "updated articles.\"\"\" today = datetime.date.today() start_date = today - datetime.timedelta(days=2) end_date = today", "arxiv_bot.arxiv_scraper import ArxivScraper from arxiv_bot.translator import translate def main(): \"\"\"List updated articles.\"\"\" today", "import datetime from arxiv_bot.arxiv_scraper import ArxivScraper from arxiv_bot.translator import translate def main(): \"\"\"List", "translate def main(): \"\"\"List updated articles.\"\"\" today = datetime.date.today() start_date = today -", "end_date = today - datetime.timedelta(days=1) articles = ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for article in", "datetime.timedelta(days=1) articles = ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for article in articles: print(article[\"itemTitle\"]) print(\", \".join(article[\"itemAuthors\"]))", "import ArxivScraper from arxiv_bot.translator import translate def main(): \"\"\"List updated articles.\"\"\" today =", "arxiv_bot.translator import translate def main(): \"\"\"List updated articles.\"\"\" today = datetime.date.today() start_date =", "- datetime.timedelta(days=1) articles = ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for article in articles: print(article[\"itemTitle\"]) print(\",", "datetime from arxiv_bot.arxiv_scraper import ArxivScraper from arxiv_bot.translator import translate def main(): \"\"\"List updated", "articles.\"\"\" today = datetime.date.today() start_date = today - datetime.timedelta(days=2) end_date = today -", "from arxiv_bot.translator import translate def main(): \"\"\"List updated articles.\"\"\" today = 
datetime.date.today() start_date", "articles = ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for article in articles: print(article[\"itemTitle\"]) print(\", \".join(article[\"itemAuthors\"])) print(translate(article[\"itemSummary\"]))", "ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for article in articles: print(article[\"itemTitle\"]) print(\", \".join(article[\"itemAuthors\"])) print(translate(article[\"itemSummary\"])) print(f'https://arxiv.org/abs/{article[\"id\"]}') if", "today - datetime.timedelta(days=2) end_date = today - datetime.timedelta(days=1) articles = ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\")", "article in articles: print(article[\"itemTitle\"]) print(\", \".join(article[\"itemAuthors\"])) print(translate(article[\"itemSummary\"])) print(f'https://arxiv.org/abs/{article[\"id\"]}') if __name__ == \"__main__\": main()", "= ArxivScraper().search(start_date=start_date, end_date=end_date, category_id=\"cs.AI\") for article in articles: print(article[\"itemTitle\"]) print(\", \".join(article[\"itemAuthors\"])) print(translate(article[\"itemSummary\"])) print(f'https://arxiv.org/abs/{article[\"id\"]}')", "def main(): \"\"\"List updated articles.\"\"\" today = datetime.date.today() start_date = today - datetime.timedelta(days=2)", "category_id=\"cs.AI\") for article in articles: print(article[\"itemTitle\"]) print(\", \".join(article[\"itemAuthors\"])) print(translate(article[\"itemSummary\"])) print(f'https://arxiv.org/abs/{article[\"id\"]}') if __name__ ==", "import translate def main(): \"\"\"List updated articles.\"\"\" today = datetime.date.today() start_date = today", "main(): \"\"\"List updated articles.\"\"\" today = datetime.date.today() start_date = today - datetime.timedelta(days=2) end_date", "= today - datetime.timedelta(days=2) end_date = today - datetime.timedelta(days=1) articles = 
ArxivScraper().search(start_date=start_date, end_date=end_date," ]